You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@apex.apache.org by th...@apache.org on 2016/05/18 20:41:51 UTC

[01/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Repository: incubator-apex-malhar
Updated Branches:
  refs/heads/master 029291d47 -> 3ce83708f


http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
index deac363..ef946ac 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/DTFileTest.java
@@ -18,25 +18,28 @@
  */
 package org.apache.hadoop.io.file.tfile;
 
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+
 import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Random;
 
 public class DTFileTest
 {
-   private static String ROOT =
+  private static String ROOT =
       System.getProperty("test.build.data", "target/tfile-test");
 
   private Configuration conf;
@@ -48,7 +51,8 @@ public class DTFileTest
   private KVGenerator kvGen;
 
 
-  static class TestConf {
+  static class TestConf
+  {
     public int minWordLen = 5;
     public int maxWordLen = 20;
     public int dictSize = 1000;
@@ -77,23 +81,18 @@ public class DTFileTest
     fs = path.getFileSystem(conf);
     timer = new NanoTimer(false);
     rng = new Random();
-    keyLenGen =
-        new RandomDistribution.Zipf(new Random(rng.nextLong()),
-            tconf.minKeyLen, tconf.maxKeyLen, 1.2);
-    RandomDistribution.DiscreteRNG valLenGen =
-        new RandomDistribution.Flat(new Random(rng.nextLong()),
-            tconf.minValLength, tconf.maxValLength);
-    RandomDistribution.DiscreteRNG wordLenGen =
-        new RandomDistribution.Flat(new Random(rng.nextLong()),
-            tconf.minWordLen, tconf.maxWordLen);
-    kvGen =
-        new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen,
-            tconf.dictSize);
+    keyLenGen = new RandomDistribution.Zipf(new Random(rng.nextLong()), tconf.minKeyLen, tconf.maxKeyLen, 1.2);
+    RandomDistribution.DiscreteRNG valLenGen = new RandomDistribution.Flat(new Random(rng.nextLong()),
+        tconf.minValLength, tconf.maxValLength);
+    RandomDistribution.DiscreteRNG wordLenGen = new RandomDistribution.Flat(new Random(rng.nextLong()),
+        tconf.minWordLen, tconf.maxWordLen);
+    kvGen = new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen,
+        tconf.dictSize);
   }
 
 
-  private static FSDataOutputStream createFSOutput(Path name, FileSystem fs)
-      throws IOException {
+  private static FSDataOutputStream createFSOutput(Path name, FileSystem fs) throws IOException
+  {
     if (fs.exists(name)) {
       fs.delete(name, true);
     }
@@ -110,9 +109,7 @@ public class DTFileTest
     byte[] key = new byte[16];
     ByteBuffer bb = ByteBuffer.wrap(key);
     try {
-      DTFile.Writer writer =
-          new DTFile.Writer(fout, tconf.minBlockSize, tconf.compress, "memcmp",
-              conf);
+      DTFile.Writer writer = new DTFile.Writer(fout, tconf.minBlockSize, tconf.compress, "memcmp", conf);
       try {
         BytesWritable tmpKey = new BytesWritable();
         BytesWritable val = new BytesWritable();
@@ -129,18 +126,16 @@ public class DTFileTest
               .getSize());
           tuples++;
         }
-      }
-      finally {
+      } finally {
         writer.close();
       }
-    }
-    finally {
+    } finally {
       fout.close();
     }
 
     long fsize = fs.getFileStatus(path).getLen();
 
-    System.out.println("Total tuple wrote " + tuples + " File size " + fsize / (1024.0 * 1024));
+    LOG.debug("Total tuple wrote {} File size {}", tuples, fsize / (1024.0 * 1024));
   }
 
 
@@ -180,13 +175,13 @@ public class DTFileTest
     long hit = CacheManager.getCache().stats().hitCount();
     scanner.lowerBound(key);
     Assert.assertEquals("Cache contains some blocks ", CacheManager.getCacheSize(), numBlocks);
-    Assert.assertEquals("Cache hit ", CacheManager.getCache().stats().hitCount(), hit+1);
+    Assert.assertEquals("Cache hit ", CacheManager.getCache().stats().hitCount(), hit + 1);
 
     /* test cache miss */
     scanner.close();
     hit = CacheManager.getCache().stats().hitCount();
     long oldmiss = CacheManager.getCache().stats().missCount();
-    ikey = tuples-1;
+    ikey = tuples - 1;
     bb.clear();
     bb.putLong(ikey);
     numBlocks = CacheManager.getCacheSize();
@@ -219,4 +214,6 @@ public class DTFileTest
     writeTFile();
   }
 
+  private static final Logger LOG = LoggerFactory.getLogger(DTFileTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
index 12857d7..f92d9aa 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
@@ -23,22 +23,23 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader;
-import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner;
+import org.apache.hadoop.io.file.tfile.DTFile.Writer;
+
+import junit.framework.TestCase;
 
 /**
  * test tfile features.
  * 
  */
-public class TestDTFile extends TestCase {
+public class TestDTFile extends TestCase
+{
   private static String ROOT =
       System.getProperty("test.build.data", "target/tfile-test");
   private FileSystem fs;
@@ -48,18 +49,21 @@ public class TestDTFile extends TestCase {
   private static final String localFormatter = "%010d";
 
   @Override
-  public void setUp() throws IOException {
+  public void setUp() throws IOException
+  {
     conf = new Configuration();
     fs = FileSystem.get(conf);
   }
 
   @Override
-  public void tearDown() throws IOException {
+  public void tearDown() throws IOException
+  {
     // do nothing
   }
 
   // read a key from the scanner
-  public byte[] readKey(Scanner scanner) throws IOException {
+  public byte[] readKey(Scanner scanner) throws IOException
+  {
     int keylen = scanner.entry().getKeyLength();
     byte[] read = new byte[keylen];
     scanner.entry().getKey(read);
@@ -67,7 +71,8 @@ public class TestDTFile extends TestCase {
   }
 
   // read a value from the scanner
-  public byte[] readValue(Scanner scanner) throws IOException {
+  public byte[] readValue(Scanner scanner) throws IOException
+  {
     int valueLen = scanner.entry().getValueLength();
     byte[] read = new byte[valueLen];
     scanner.entry().getValue(read);
@@ -75,7 +80,8 @@ public class TestDTFile extends TestCase {
   }
 
   // read a long value from the scanner
-  public byte[] readLongValue(Scanner scanner, int len) throws IOException {
+  public byte[] readLongValue(Scanner scanner, int len) throws IOException
+  {
     DataInputStream din = scanner.entry().getValueStream();
     byte[] b = new byte[len];
     din.readFully(b);
@@ -86,7 +92,8 @@ public class TestDTFile extends TestCase {
   // write some records into the tfile
   // write them twice
   private int writeSomeRecords(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     String value = "value";
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
@@ -98,7 +105,8 @@ public class TestDTFile extends TestCase {
 
   // read the records and check
   private int readAndCheckbytes(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     String value = "value";
     for (int i = start; i < (start + n); i++) {
       byte[] key = readKey(scanner);
@@ -125,7 +133,8 @@ public class TestDTFile extends TestCase {
   // write some large records
   // write them twice
   private int writeLargeRecords(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     byte[] value = new byte[largeVal];
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
@@ -138,7 +147,8 @@ public class TestDTFile extends TestCase {
   // read large records
   // read them twice since its duplicated
   private int readLargeRecords(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < (start + n); i++) {
       byte[] key = readKey(scanner);
       String keyStr = String.format(localFormatter, i);
@@ -154,7 +164,8 @@ public class TestDTFile extends TestCase {
   }
 
   // write empty keys and values
-  private void writeEmptyRecords(Writer writer, int n) throws IOException {
+  private void writeEmptyRecords(Writer writer, int n) throws IOException
+  {
     byte[] key = new byte[0];
     byte[] value = new byte[0];
     for (int i = 0; i < n; i++) {
@@ -163,7 +174,8 @@ public class TestDTFile extends TestCase {
   }
 
   // read empty keys and values
-  private void readEmptyRecords(Scanner scanner, int n) throws IOException {
+  private void readEmptyRecords(Scanner scanner, int n) throws IOException
+  {
     byte[] key = new byte[0];
     byte[] value = new byte[0];
     byte[] readKey = null;
@@ -178,7 +190,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int writePrepWithKnownLength(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     // get the length of the key
     String key = String.format(localFormatter, start);
     int keyLen = key.getBytes().length;
@@ -198,7 +211,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int readPrepWithKnownLength(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < (start + n); i++) {
       String key = String.format(localFormatter, i);
       byte[] read = readKey(scanner);
@@ -212,7 +226,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int writePrepWithUnkownLength(Writer writer, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < (start + n); i++) {
       DataOutputStream out = writer.prepareAppendKey(-1);
       String localKey = String.format(localFormatter, i);
@@ -227,7 +242,8 @@ public class TestDTFile extends TestCase {
   }
 
   private int readPrepWithUnknownLength(Scanner scanner, int start, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = start; i < start; i++) {
       String key = String.format(localFormatter, i);
       byte[] read = readKey(scanner);
@@ -235,8 +251,7 @@ public class TestDTFile extends TestCase {
       try {
         read = readValue(scanner);
         assertTrue(false);
-      }
-      catch (IOException ie) {
+      } catch (IOException ie) {
         // should have thrown exception
       }
       String value = "value" + key;
@@ -247,11 +262,13 @@ public class TestDTFile extends TestCase {
     return (start + n);
   }
 
-  private byte[] getSomeKey(int rowId) {
+  private byte[] getSomeKey(int rowId)
+  {
     return String.format(localFormatter, rowId).getBytes();
   }
 
-  private void writeRecords(Writer writer) throws IOException {
+  private void writeRecords(Writer writer) throws IOException
+  {
     writeEmptyRecords(writer, 10);
     int ret = writeSomeRecords(writer, 0, 100);
     ret = writeLargeRecords(writer, ret, 1);
@@ -260,7 +277,8 @@ public class TestDTFile extends TestCase {
     writer.close();
   }
 
-  private void readAllRecords(Scanner scanner) throws IOException {
+  private void readAllRecords(Scanner scanner) throws IOException
+  {
     readEmptyRecords(scanner, 10);
     int ret = readAndCheckbytes(scanner, 0, 100);
     ret = readLargeRecords(scanner, ret, 1);
@@ -268,8 +286,11 @@ public class TestDTFile extends TestCase {
     ret = readPrepWithUnknownLength(scanner, ret, 50);
   }
 
-  private FSDataOutputStream createFSOutput(Path name) throws IOException {
-    if (fs.exists(name)) fs.delete(name, true);
+  private FSDataOutputStream createFSOutput(Path name) throws IOException
+  {
+    if (fs.exists(name)) {
+      fs.delete(name, true);
+    }
     FSDataOutputStream fout = fs.create(name);
     return fout;
   }
@@ -277,7 +298,8 @@ public class TestDTFile extends TestCase {
   /**
    * test none codecs
    */
-  void basicWithSomeCodec(String codec) throws IOException {
+  void basicWithSomeCodec(String codec) throws IOException
+  {
     Path ncTFile = new Path(ROOT, "basic.tfile");
     FSDataOutputStream fout = createFSOutput(ncTFile);
     Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
@@ -330,7 +352,8 @@ public class TestDTFile extends TestCase {
   }
 
   // unsorted with some codec
-  void unsortedWithSomeCodec(String codec) throws IOException {
+  void unsortedWithSomeCodec(String codec) throws IOException
+  {
     Path uTfile = new Path(ROOT, "unsorted.tfile");
     FSDataOutputStream fout = createFSOutput(uTfile);
     Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
@@ -349,19 +372,22 @@ public class TestDTFile extends TestCase {
     fs.delete(uTfile, true);
   }
 
-  public void testTFileFeatures() throws IOException {
+  public void testTFileFeatures() throws IOException
+  {
     basicWithSomeCodec("none");
     basicWithSomeCodec("gz");
   }
 
   // test unsorted t files.
-  public void testUnsortedTFileFeatures() throws IOException {
+  public void testUnsortedTFileFeatures() throws IOException
+  {
     unsortedWithSomeCodec("none");
     unsortedWithSomeCodec("gz");
   }
 
   private void writeNumMetablocks(Writer writer, String compression, int n)
-      throws IOException {
+      throws IOException
+  {
     for (int i = 0; i < n; i++) {
       DataOutputStream dout =
           writer.prepareMetaBlock("TfileMeta" + i, compression);
@@ -372,25 +398,26 @@ public class TestDTFile extends TestCase {
   }
 
   private void someTestingWithMetaBlock(Writer writer, String compression)
-      throws IOException {
+      throws IOException
+  {
     DataOutputStream dout = null;
     writeNumMetablocks(writer, compression, 10);
     try {
       dout = writer.prepareMetaBlock("TfileMeta1", compression);
       assertTrue(false);
-    }
-    catch (MetaBlockAlreadyExists me) {
+    } catch (MetaBlockAlreadyExists me) {
       // avoid this exception
     }
     dout = writer.prepareMetaBlock("TFileMeta100", compression);
     dout.close();
   }
 
-  private void readNumMetablocks(Reader reader, int n) throws IOException {
+  private void readNumMetablocks(Reader reader, int n) throws IOException
+  {
     int len = ("something to test" + 0).getBytes().length;
     for (int i = 0; i < n; i++) {
       DataInputStream din = reader.getMetaBlock("TfileMeta" + i);
-      byte b[] = new byte[len];
+      byte[] b = new byte[len];
       din.readFully(b);
       assertTrue("faield to match metadata", Arrays.equals(
           ("something to test" + i).getBytes(), b));
@@ -398,14 +425,14 @@ public class TestDTFile extends TestCase {
     }
   }
 
-  private void someReadingWithMetaBlock(Reader reader) throws IOException {
+  private void someReadingWithMetaBlock(Reader reader) throws IOException
+  {
     DataInputStream din = null;
     readNumMetablocks(reader, 10);
     try {
       din = reader.getMetaBlock("NO ONE");
       assertTrue(false);
-    }
-    catch (MetaBlockDoesNotExist me) {
+    } catch (MetaBlockDoesNotExist me) {
       // should catch
     }
     din = reader.getMetaBlock("TFileMeta100");
@@ -415,7 +442,8 @@ public class TestDTFile extends TestCase {
   }
 
   // test meta blocks for tfiles
-  public void _testMetaBlocks() throws IOException {
+  public void _testMetaBlocks() throws IOException
+  {
     Path mFile = new Path(ROOT, "meta.tfile");
     FSDataOutputStream fout = createFSOutput(mFile);
     Writer writer = new Writer(fout, minBlockSize, "none", null, conf);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
index 071d752..f3479de 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
@@ -24,6 +24,11 @@ import java.io.EOFException;
 import java.io.IOException;
 import java.util.Random;
 
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -31,26 +36,23 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader;
-import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader.Location;
 import org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 
 /**
- * 
+ *
  * Byte arrays test case class using GZ compression codec, base class of none
  * and LZO compression classes.
- * 
+ *
  */
-public class TestDTFileByteArrays {
+public class TestDTFileByteArrays
+{
   private static String ROOT =
       System.getProperty("test.build.data", "target/tfile-test");
-  private final static int BLOCK_SIZE = 512;
-  private final static int BUF_SIZE = 64;
-  private final static int K = 1024;
+  private static final int BLOCK_SIZE = 512;
+  private static final int BUF_SIZE = 64;
+  private static final int K = 1024;
   protected boolean skip = false;
 
   private static final String KEY = "key";
@@ -76,19 +78,22 @@ public class TestDTFileByteArrays {
   private int records2ndBlock = usingNative ? 5574 : 4263;
 
   public void init(String compression, String comparator,
-      int numRecords1stBlock, int numRecords2ndBlock) {
+      int numRecords1stBlock, int numRecords2ndBlock)
+  {
     init(compression, comparator);
     this.records1stBlock = numRecords1stBlock;
     this.records2ndBlock = numRecords2ndBlock;
   }
   
-  public void init(String compression, String comparator) {
+  public void init(String compression, String comparator)
+  {
     this.compression = compression;
     this.comparator = comparator;
   }
 
   @Before
-  public void setUp() throws IOException {
+  public void setUp() throws IOException
+  {
     path = new Path(ROOT, outputFile);
     fs = path.getFileSystem(conf);
     out = fs.create(path);
@@ -96,15 +101,19 @@ public class TestDTFileByteArrays {
   }
 
   @After
-  public void tearDown() throws IOException {
-    if (!skip)
+  public void tearDown() throws IOException
+  {
+    if (!skip) {
       fs.delete(path, true);
+    }
   }
 
   @Test
-  public void testNoDataEntry() throws IOException {
-    if (skip) 
+  public void testNoDataEntry() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -116,9 +125,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testOneDataEntry() throws IOException {
-    if (skip)
+  public void testOneDataEntry() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(1);
     readRecords(1);
 
@@ -130,22 +141,26 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testTwoDataEntries() throws IOException {
-    if (skip)
+  public void testTwoDataEntries() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(2);
     readRecords(2);
   }
 
   /**
    * Fill up exactly one block.
-   * 
+   *
    * @throws IOException
    */
   @Test
-  public void testOneBlock() throws IOException {
-    if (skip)
+  public void testOneBlock() throws IOException
+  {
+    if (skip) {
       return;
+    }
     // just under one block
     writeRecords(records1stBlock);
     readRecords(records1stBlock);
@@ -155,13 +170,15 @@ public class TestDTFileByteArrays {
 
   /**
    * One block plus one record.
-   * 
+   *
    * @throws IOException
    */
   @Test
-  public void testOneBlockPlusOneEntry() throws IOException {
-    if (skip)
+  public void testOneBlockPlusOneEntry() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(records1stBlock + 1);
     readRecords(records1stBlock + 1);
     checkBlockIndex(records1stBlock - 1, 0);
@@ -169,18 +186,22 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testTwoBlocks() throws IOException {
-    if (skip)
+  public void testTwoBlocks() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(records1stBlock + 5);
     readRecords(records1stBlock + 5);
     checkBlockIndex(records1stBlock + 4, 1);
   }
 
   @Test
-  public void testThreeBlocks() throws IOException {
-    if (skip) 
+  public void testThreeBlocks() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(2 * records1stBlock + 5);
     readRecords(2 * records1stBlock + 5);
 
@@ -224,17 +245,20 @@ public class TestDTFileByteArrays {
     readKeyManyTimes(records1stBlock + 10);
   }
 
-  Location locate(Scanner scanner, byte[] key) throws IOException {
-    if (scanner.seekTo(key) == true) {
+  Location locate(Scanner scanner, byte[] key) throws IOException
+  {
+    if (scanner.seekTo(key)) {
       return scanner.currentLocation;
     }
     return scanner.endLocation;
   }
   
   @Test
-  public void testLocate() throws IOException {
-    if (skip)
+  public void testLocate() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(3 * records1stBlock);
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
@@ -248,9 +272,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureWriterNotClosed() throws IOException {
-    if (skip)
+  public void testFailureWriterNotClosed() throws IOException
+  {
+    if (skip) {
       return;
+    }
     Reader reader = null;
     try {
       reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -265,9 +291,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureWriteMetaBlocksWithSameName() throws IOException {
-    if (skip)
+  public void testFailureWriteMetaBlocksWithSameName() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writer.append("keyX".getBytes(), "valueX".getBytes());
 
     // create a new metablock
@@ -287,9 +315,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureGetNonExistentMetaBlock() throws IOException {
-    if (skip)
+  public void testFailureGetNonExistentMetaBlock() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writer.append("keyX".getBytes(), "valueX".getBytes());
 
     // create a new metablock
@@ -314,9 +344,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureWriteRecordAfterMetaBlock() throws IOException {
-    if (skip)
+  public void testFailureWriteRecordAfterMetaBlock() throws IOException
+  {
+    if (skip) {
       return;
+    }
     // write a key/value first
     writer.append("keyX".getBytes(), "valueX".getBytes());
     // create a new metablock
@@ -336,9 +368,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureReadValueManyTimes() throws IOException {
-    if (skip)
+  public void testFailureReadValueManyTimes() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(5);
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -360,9 +394,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureBadCompressionCodec() throws IOException {
-    if (skip)
+  public void testFailureBadCompressionCodec() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     out = fs.create(path);
     try {
@@ -375,9 +411,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureOpenEmptyFile() throws IOException {
-    if (skip)
+  public void testFailureOpenEmptyFile() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     // create an absolutely empty file
     path = new Path(fs.getWorkingDirectory(), outputFile);
@@ -392,9 +430,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureOpenRandomFile() throws IOException {
-    if (skip)
+  public void testFailureOpenRandomFile() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     // create an random file
     path = new Path(fs.getWorkingDirectory(), outputFile);
@@ -416,9 +456,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureKeyLongerThan64K() throws IOException {
-    if (skip)
+  public void testFailureKeyLongerThan64K() throws IOException
+  {
+    if (skip) {
       return;
+    }
     byte[] buf = new byte[64 * K + 1];
     Random rand = new Random();
     rand.nextBytes(buf);
@@ -431,9 +473,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureOutOfOrderKeys() throws IOException {
-    if (skip)
+  public void testFailureOutOfOrderKeys() throws IOException
+  {
+    if (skip) {
       return;
+    }
     try {
       writer.append("keyM".getBytes(), "valueM".getBytes());
       writer.append("keyA".getBytes(), "valueA".getBytes());
@@ -447,9 +491,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeOffset() throws IOException {
-    if (skip)
+  public void testFailureNegativeOffset() throws IOException
+  {
+    if (skip) {
       return;
+    }
     try {
       writer.append("keyX".getBytes(), -1, 4, "valueX".getBytes(), 0, 6);
       Assert.fail("Error on handling negative offset.");
@@ -460,9 +506,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeOffset_2() throws IOException {
-    if (skip)
+  public void testFailureNegativeOffset_2() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -480,9 +528,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeLength() throws IOException {
-    if (skip)
+  public void testFailureNegativeLength() throws IOException
+  {
+    if (skip) {
       return;
+    }
     try {
       writer.append("keyX".getBytes(), 0, -1, "valueX".getBytes(), 0, 6);
       Assert.fail("Error on handling negative length.");
@@ -493,9 +543,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeLength_2() throws IOException {
-    if (skip)
+  public void testFailureNegativeLength_2() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
 
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
@@ -513,9 +565,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureNegativeLength_3() throws IOException {
-    if (skip)
+  public void testFailureNegativeLength_3() throws IOException
+  {
+    if (skip) {
       return;
+    }
     writeRecords(3);
 
     Reader reader =
@@ -544,9 +598,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureCompressionNotWorking() throws IOException {
-    if (skip)
+  public void testFailureCompressionNotWorking() throws IOException
+  {
+    if (skip) {
       return;
+    }
     long rawDataSize = writeRecords(10 * records1stBlock, false);
     if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) {
       Assert.assertTrue(out.getPos() < rawDataSize);
@@ -555,9 +611,11 @@ public class TestDTFileByteArrays {
   }
 
   @Test
-  public void testFailureFileWriteNotAt0Position() throws IOException {
-    if (skip)
+  public void testFailureFileWriteNotAt0Position() throws IOException
+  {
+    if (skip) {
       return;
+    }
     closeOutput();
     out = fs.create(path);
     out.write(123);
@@ -571,11 +629,13 @@ public class TestDTFileByteArrays {
     closeOutput();
   }
 
-  private long writeRecords(int count) throws IOException {
+  private long writeRecords(int count) throws IOException
+  {
     return writeRecords(count, true);
   }
 
-  private long writeRecords(int count, boolean close) throws IOException {
+  private long writeRecords(int count, boolean close) throws IOException
+  {
     long rawDataSize = writeRecords(writer, count);
     if (close) {
       closeOutput();
@@ -583,7 +643,8 @@ public class TestDTFileByteArrays {
     return rawDataSize;
   }
 
-  static long writeRecords(Writer writer, int count) throws IOException {
+  static long writeRecords(Writer writer, int count) throws IOException
+  {
     long rawDataSize = 0;
     int nx;
     for (nx = 0; nx < count; nx++) {
@@ -599,21 +660,24 @@ public class TestDTFileByteArrays {
 
   /**
    * Insert some leading 0's in front of the value, to make the keys sorted.
-   * 
+   *
    * @param prefix prefix
    * @param value  value
    * @return sorted key
    */
-  static String composeSortedKey(String prefix, int value) {
+  static String composeSortedKey(String prefix, int value)
+  {
     return String.format("%s%010d", prefix, value);
   }
 
-  private void readRecords(int count) throws IOException {
+  private void readRecords(int count) throws IOException
+  {
     readRecords(fs, path, count, conf);
   }
 
   static void readRecords(FileSystem fs, Path path, int count,
-      Configuration conf) throws IOException {
+      Configuration conf) throws IOException
+  {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
@@ -643,7 +707,8 @@ public class TestDTFileByteArrays {
     }
   }
 
-  private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOException {
+  private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner();
     scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes());
@@ -654,12 +719,11 @@ public class TestDTFileByteArrays {
   }
 
   private void readValueBeforeKey(int recordIndex)
-      throws IOException {
+      throws IOException
+  {
     Reader reader =
         new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     try {
       byte[] vbuf = new byte[BUF_SIZE];
@@ -679,11 +743,10 @@ public class TestDTFileByteArrays {
   }
 
   private void readKeyWithoutValue(int recordIndex)
-      throws IOException {
+      throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     try {
       // read the indexed key
@@ -708,12 +771,11 @@ public class TestDTFileByteArrays {
   }
 
   private void readValueWithoutKey(int recordIndex)
-      throws IOException {
+      throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
 
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     byte[] vbuf1 = new byte[BUF_SIZE];
     int vlen1 = scanner.entry().getValueLength();
@@ -724,20 +786,18 @@ public class TestDTFileByteArrays {
       byte[] vbuf2 = new byte[BUF_SIZE];
       int vlen2 = scanner.entry().getValueLength();
       scanner.entry().getValue(vbuf2);
-      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
-          + (recordIndex + 1));
+      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
     }
 
     scanner.close();
     reader.close();
   }
 
-  private void readKeyManyTimes(int recordIndex) throws IOException {
+  private void readKeyManyTimes(int recordIndex) throws IOException
+  {
     Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
 
-    Scanner scanner =
-        reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
-            .getBytes(), null);
+    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
 
     // read the indexed key
     byte[] kbuf1 = new byte[BUF_SIZE];
@@ -760,7 +820,8 @@ public class TestDTFileByteArrays {
     reader.close();
   }
 
-  private void closeOutput() throws IOException {
+  private void closeOutput() throws IOException
+  {
     if (writer != null) {
       writer.close();
       writer = null;


[22/22] incubator-apex-malhar git commit: Merge branch 'APEXMALHAR-2095' of https://github.com/chandnisingh/incubator-apex-malhar

Posted by th...@apache.org.
Merge branch 'APEXMALHAR-2095' of https://github.com/chandnisingh/incubator-apex-malhar


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/commit/3ce83708
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/tree/3ce83708
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/diff/3ce83708

Branch: refs/heads/master
Commit: 3ce83708f795b081d564be357a8333928154398e
Parents: 029291d 3735316
Author: Thomas Weise <th...@datatorrent.com>
Authored: Wed May 18 13:34:39 2016 -0700
Committer: Thomas Weise <th...@datatorrent.com>
Committed: Wed May 18 13:34:39 2016 -0700

----------------------------------------------------------------------
 library/library-checkstyle-suppressions.xml     |   34 +
 library/pom.xml                                 |    7 +-
 .../lib/algo/AbstractStreamPatternMatcher.java  |    5 +-
 .../datatorrent/lib/algo/BottomNUnifier.java    |    6 +-
 .../com/datatorrent/lib/algo/FilterKeyVals.java |    9 +-
 .../datatorrent/lib/algo/FilterKeysHashMap.java |    4 +-
 .../com/datatorrent/lib/algo/FilterKeysMap.java |   11 +-
 .../com/datatorrent/lib/algo/FilterValues.java  |    2 +-
 .../java/com/datatorrent/lib/algo/FirstN.java   |    3 +-
 .../com/datatorrent/lib/algo/InsertSort.java    |    3 +-
 .../datatorrent/lib/algo/InsertSortDesc.java    |    9 +-
 .../lib/algo/LeastFrequentKeyArrayUnifier.java  |    2 +-
 .../lib/algo/LeastFrequentKeyMap.java           |    6 +-
 .../lib/algo/LeastFrequentKeyUnifier.java       |    2 +-
 .../lib/algo/LeastFrequentValue.java            |    4 +-
 .../java/com/datatorrent/lib/algo/MatchMap.java |    5 +-
 .../com/datatorrent/lib/algo/MergeSort.java     |  224 ++--
 .../datatorrent/lib/algo/MergeSortNumber.java   |   94 +-
 .../lib/algo/MostFrequentKeyMap.java            |    4 +-
 .../datatorrent/lib/algo/MostFrequentValue.java |    2 +-
 .../com/datatorrent/lib/algo/UniqueCounter.java |    9 +-
 .../com/datatorrent/lib/appdata/QueueUtils.java |   11 +-
 .../com/datatorrent/lib/appdata/StoreUtils.java |   35 +-
 .../datastructs/CacheLRUSynchronousFlush.java   |   27 +-
 .../appdata/datastructs/DimensionalTable.java   |   69 +-
 .../lib/appdata/gpo/GPOByteArrayList.java       |   10 +-
 .../datatorrent/lib/appdata/gpo/GPOGetters.java |    4 +-
 .../datatorrent/lib/appdata/gpo/GPOMutable.java |   96 +-
 .../datatorrent/lib/appdata/gpo/GPOType.java    |  244 ++--
 .../datatorrent/lib/appdata/gpo/GPOUtils.java   | 1176 +++++++-----------
 .../com/datatorrent/lib/appdata/gpo/Serde.java  |    5 +-
 .../lib/appdata/gpo/SerdeFieldsDescriptor.java  |   10 +-
 .../lib/appdata/gpo/SerdeListGPOMutable.java    |   19 +-
 .../lib/appdata/gpo/SerdeListPrimitive.java     |   14 +-
 .../lib/appdata/gpo/SerdeListString.java        |   12 +-
 .../query/AbstractWindowEndQueueManager.java    |   89 +-
 .../query/AppDataWindowEndQueueManager.java     |   15 +-
 .../lib/appdata/query/QueryBundle.java          |    4 +-
 .../lib/appdata/query/QueryExecutor.java        |    4 +-
 .../appdata/query/QueryManagerAsynchronous.java |   37 +-
 .../appdata/query/QueryManagerSynchronous.java  |   23 +-
 .../lib/appdata/query/QueueList.java            |   25 +-
 .../lib/appdata/query/QueueManager.java         |    5 +
 .../appdata/query/SimpleDoneQueueManager.java   |    4 +-
 .../lib/appdata/query/SimpleQueueManager.java   |   16 +-
 .../lib/appdata/query/WindowBoundedService.java |   11 +-
 .../appdata/query/WindowEndQueueManager.java    |    9 +-
 .../query/serde/CustomMessageDeserializer.java  |    4 +-
 .../serde/DataQuerySnapshotDeserializer.java    |   81 +-
 .../query/serde/DataQuerySnapshotValidator.java |   11 +-
 .../serde/DataResultSnapshotSerializer.java     |   14 +-
 .../query/serde/MessageDeserializerFactory.java |   84 +-
 .../query/serde/MessageSerializerFactory.java   |   52 +-
 .../query/serde/SchemaQueryDeserializer.java    |   21 +-
 .../query/serde/SimpleDataDeserializer.java     |    3 +-
 .../query/serde/SimpleDataSerializer.java       |    3 +-
 .../lib/appdata/schemas/CustomTimeBucket.java   |   18 +-
 .../lib/appdata/schemas/DataQuerySnapshot.java  |   40 +-
 .../lib/appdata/schemas/DataResultSnapshot.java |   14 +-
 .../schemas/DimensionalConfigurationSchema.java |   11 +-
 .../lib/appdata/schemas/DimensionalSchema.java  |    5 +-
 .../datatorrent/lib/appdata/schemas/Fields.java |   17 +-
 .../lib/appdata/schemas/FieldsDescriptor.java   |   84 +-
 .../datatorrent/lib/appdata/schemas/QRBase.java |    7 +-
 .../datatorrent/lib/appdata/schemas/Query.java  |   21 +-
 .../datatorrent/lib/appdata/schemas/Result.java |    3 +-
 .../lib/appdata/schemas/ResultFormatter.java    |   58 +-
 .../datatorrent/lib/appdata/schemas/Schema.java |    5 +
 .../lib/appdata/schemas/SchemaQuery.java        |   13 +-
 .../lib/appdata/schemas/SchemaRegistry.java     |    3 +
 .../appdata/schemas/SchemaRegistryMultiple.java |    7 +-
 .../appdata/schemas/SchemaRegistrySingle.java   |   18 +-
 .../lib/appdata/schemas/SchemaResult.java       |   10 +-
 .../appdata/schemas/SchemaResultSerializer.java |   23 +-
 .../lib/appdata/schemas/SchemaUtils.java        |  144 +--
 .../lib/appdata/schemas/SnapshotSchema.java     |   61 +-
 .../lib/appdata/schemas/TimeBucket.java         |   11 +-
 .../datatorrent/lib/appdata/schemas/Type.java   |   52 +-
 .../snapshot/AbstractAppDataSnapshotServer.java |   16 +-
 .../snapshot/AppDataSnapshotServerMap.java      |   15 +-
 .../snapshot/AppDataSnapshotServerPOJO.java     |    2 +-
 .../lib/codec/JavaSerializationStreamCodec.java |   62 +-
 .../ByteArrayToStringConverterOperator.java     |    7 +-
 .../datatorrent/lib/converter/Converter.java    |    3 +-
 .../MapToKeyHashValuePairConverter.java         |    8 +-
 .../converter/MapToKeyValuePairConverter.java   |   11 +-
 .../StringValueToNumberConverterForMap.java     |   14 +-
 .../datatorrent/lib/counters/BasicCounters.java |   11 +-
 ...nsactionableKeyValueStoreOutputOperator.java |    2 +-
 ...BatchTransactionableStoreOutputOperator.java |   10 +-
 .../db/AbstractKeyValueStoreInputOperator.java  |    6 +-
 ...nsactionableKeyValueStoreOutputOperator.java |    2 +-
 ...sThruTransactionableStoreOutputOperator.java |    3 +-
 .../lib/db/AbstractStoreInputOperator.java      |    8 +-
 .../lib/db/AbstractStoreOutputOperator.java     |    8 +-
 ...tractTransactionableStoreOutputOperator.java |    8 +-
 ...bcNonTransactionableBatchOutputOperator.java |   20 +-
 .../db/jdbc/JDBCDimensionalOutputOperator.java  |    8 +-
 .../lib/db/jdbc/JdbcNonTransactionalStore.java  |   11 +-
 .../lib/fileaccess/DTFileReader.java            |    5 +-
 .../lib/fileaccess/FileAccessFSImpl.java        |    6 +-
 .../datatorrent/lib/fileaccess/TFileImpl.java   |   13 +-
 .../datatorrent/lib/fileaccess/TFileReader.java |    9 +-
 .../datatorrent/lib/fileaccess/TFileWriter.java |    8 +-
 .../datatorrent/lib/formatter/Formatter.java    |    2 +-
 .../lib/io/AbstractHttpGetOperator.java         |    3 +-
 .../lib/io/AbstractHttpInputOperator.java       |   11 +-
 .../lib/io/AbstractHttpOperator.java            |    6 +-
 .../io/AbstractKeyValueStoreOutputOperator.java |   12 +-
 .../lib/io/AbstractSocketInputOperator.java     |   16 +-
 .../datatorrent/lib/io/ApacheGenRandomLogs.java |  315 ++---
 .../CollectionMultiConsoleOutputOperator.java   |    8 +-
 .../lib/io/ConsoleOutputOperator.java           |   11 +-
 .../lib/io/HttpJsonChunksInputOperator.java     |   13 +-
 .../lib/io/HttpLinesInputOperator.java          |    6 +-
 .../lib/io/HttpPostOutputOperator.java          |    7 +-
 .../lib/io/IdempotentStorageManager.java        |   15 +-
 .../lib/io/MapMultiConsoleOutputOperator.java   |    8 +-
 .../lib/io/PubSubWebSocketAppDataQuery.java     |   17 +-
 .../lib/io/PubSubWebSocketAppDataResult.java    |   17 +-
 .../lib/io/PubSubWebSocketInputOperator.java    |    3 +-
 .../lib/io/SimpleSinglePortInputOperator.java   |   20 +-
 .../datatorrent/lib/io/SmtpOutputOperator.java  |   30 +-
 .../lib/io/WebSocketInputOperator.java          |   25 +-
 .../lib/io/WebSocketOutputOperator.java         |    6 +-
 .../lib/io/WebSocketServerInputOperator.java    |   14 +-
 .../lib/io/WidgetOutputOperator.java            |   28 +-
 .../lib/io/fs/AbstractFileInputOperator.java    |  182 ++-
 .../lib/io/fs/AbstractReconciler.java           |   17 +-
 .../fs/AbstractThroughputFileInputOperator.java |   34 +-
 .../com/datatorrent/lib/io/fs/FileSplitter.java |  101 +-
 .../lib/io/fs/FilterStreamContext.java          |    2 +-
 .../lib/io/fs/FilterStreamProvider.java         |   13 +-
 .../lib/io/fs/TailFsInputOperator.java          |    6 +-
 .../com/datatorrent/lib/io/fs/package-info.java |    2 +-
 .../lib/io/jms/AbstractJMSOutputOperator.java   |   38 +-
 .../AbstractJMSSinglePortOutputOperator.java    |    3 +-
 .../io/jms/FSPsuedoTransactionableStore.java    |   69 +-
 .../com/datatorrent/lib/io/jms/JMSBase.java     |   18 +-
 .../lib/io/jms/JMSMultiPortOutputOperator.java  |   34 +-
 .../lib/io/jms/JMSObjectInputOperator.java      |   39 +-
 .../lib/io/jms/JMSTransactionableStore.java     |   56 +-
 .../datatorrent/lib/io/jms/package-info.java    |    2 +-
 .../logs/ApacheLogParseMapOutputOperator.java   |   13 +-
 .../lib/logs/ApacheLogParseOperator.java        |  193 ++-
 .../lib/logs/ApacheVirtualLogParseOperator.java |  296 ++---
 .../lib/logs/DimensionAggregationUnifier.java   |    6 +-
 .../datatorrent/lib/logs/DimensionObject.java   |   11 +-
 .../lib/logs/FilteredLineToTokenArrayList.java  |    5 +-
 .../lib/logs/FilteredLineToTokenHashMap.java    |   57 +-
 .../lib/logs/FilteredLineTokenizerKeyVal.java   |   57 +-
 .../lib/logs/LineToTokenArrayList.java          |   14 +-
 .../lib/logs/LineToTokenHashMap.java            |    5 +-
 .../com/datatorrent/lib/logs/LineTokenizer.java |   31 +-
 .../lib/logs/LineTokenizerKeyVal.java           |   14 +-
 .../logs/MultiWindowDimensionAggregation.java   |   30 +-
 .../lib/logs/RegexMatchMapOperator.java         |   15 +-
 .../lib/math/AbstractAggregateCalc.java         |   88 +-
 .../datatorrent/lib/math/AbstractOutput.java    |   42 +-
 .../lib/math/AbstractXmlCartesianProduct.java   |   42 +-
 .../AbstractXmlKeyValueCartesianProduct.java    |    3 +-
 .../java/com/datatorrent/lib/math/Average.java  |  138 +-
 .../com/datatorrent/lib/math/AverageKeyVal.java |  167 +--
 .../java/com/datatorrent/lib/math/Change.java   |  102 +-
 .../com/datatorrent/lib/math/ChangeAlert.java   |  114 +-
 .../datatorrent/lib/math/ChangeAlertKeyVal.java |  136 +-
 .../datatorrent/lib/math/ChangeAlertMap.java    |    2 +-
 .../com/datatorrent/lib/math/ChangeKeyVal.java  |    6 +-
 .../datatorrent/lib/math/CompareExceptMap.java  |    9 +-
 .../com/datatorrent/lib/math/CompareMap.java    |   11 +-
 .../com/datatorrent/lib/math/CountKeyVal.java   |  114 +-
 .../java/com/datatorrent/lib/math/Division.java |   12 +-
 .../com/datatorrent/lib/math/ExceptMap.java     |   58 +-
 .../datatorrent/lib/math/LogicalCompare.java    |  102 +-
 .../lib/math/LogicalCompareToConstant.java      |  130 +-
 .../java/com/datatorrent/lib/math/Margin.java   |  160 +--
 .../com/datatorrent/lib/math/MarginKeyVal.java  |  254 ++--
 .../com/datatorrent/lib/math/MarginMap.java     |   16 +-
 .../main/java/com/datatorrent/lib/math/Max.java |    5 +-
 .../com/datatorrent/lib/math/MaxKeyVal.java     |   10 +-
 .../main/java/com/datatorrent/lib/math/Min.java |   15 +-
 .../com/datatorrent/lib/math/MinKeyVal.java     |   16 +-
 .../lib/math/MultiplyByConstant.java            |    8 +-
 .../java/com/datatorrent/lib/math/Quotient.java |  108 +-
 .../com/datatorrent/lib/math/QuotientMap.java   |  342 ++---
 .../java/com/datatorrent/lib/math/Range.java    |  106 +-
 .../com/datatorrent/lib/math/RangeKeyVal.java   |  146 ++-
 .../datatorrent/lib/math/RunningAverage.java    |  114 +-
 .../java/com/datatorrent/lib/math/Sigma.java    |   40 +-
 .../main/java/com/datatorrent/lib/math/Sum.java |  366 +++---
 .../com/datatorrent/lib/math/SumCountMap.java   |  444 +++----
 .../com/datatorrent/lib/math/SumKeyVal.java     |   13 +-
 .../math/XmlKeyValueStringCartesianProduct.java |    7 +-
 .../lib/multiwindow/AbstractSlidingWindow.java  |  210 ++--
 .../AbstractSlidingWindowKeyVal.java            |  184 +--
 .../lib/multiwindow/MultiWindowRangeKeyVal.java |    2 +-
 .../lib/multiwindow/MultiWindowSumKeyVal.java   |   75 +-
 .../lib/multiwindow/SimpleMovingAverage.java    |  150 +--
 .../lib/multiwindow/SortedMovingWindow.java     |   10 +-
 .../StatsAwareStatelessPartitioner.java         |   37 +-
 .../lib/script/JavaScriptOperator.java          |   43 +-
 .../datatorrent/lib/script/ScriptOperator.java  |   13 +-
 .../lib/statistics/MedianOperator.java          |   12 +-
 .../lib/statistics/ModeOperator.java            |    4 +-
 .../lib/statistics/StandardDeviation.java       |   12 +-
 .../lib/statistics/WeightedMeanOperator.java    |   14 +-
 .../lib/stream/AbstractAggregator.java          |  166 +--
 .../datatorrent/lib/stream/ArrayListToItem.java |   43 +-
 .../lib/stream/ConsolidatorKeyVal.java          |  266 ++--
 .../com/datatorrent/lib/stream/Counter.java     |   92 +-
 .../com/datatorrent/lib/stream/DevNull.java     |   24 +-
 .../datatorrent/lib/stream/DevNullCounter.java  |  264 ++--
 .../lib/stream/HashMapToKeyValPair.java         |   85 +-
 .../lib/stream/JsonByteArrayOperator.java       |   10 +-
 .../lib/stream/KeyValPairToHashMap.java         |   43 +-
 .../lib/stream/RoundRobinHashMap.java           |   97 +-
 .../lib/stream/StreamDuplicater.java            |    6 +-
 .../datatorrent/lib/stream/StreamMerger.java    |   10 +-
 .../streamquery/AbstractSqlStreamOperator.java  |   11 +-
 .../lib/streamquery/DeleteOperator.java         |    2 +-
 .../lib/streamquery/DerbySqlStreamOperator.java |   93 +-
 .../lib/streamquery/GroupByHavingOperator.java  |   23 +-
 .../lib/streamquery/InnerJoinOperator.java      |   47 +-
 .../lib/streamquery/OrderByOperator.java        |  100 +-
 .../lib/streamquery/OrderByRule.java            |    2 +-
 .../lib/streamquery/OuterJoinOperator.java      |    4 +-
 .../lib/streamquery/SelectFunctionOperator.java |    4 +-
 .../lib/streamquery/SelectOperator.java         |    5 +-
 .../lib/streamquery/SelectTopOperator.java      |   17 +-
 .../lib/streamquery/UpdateOperator.java         |   33 +-
 .../streamquery/condition/BetweenCondition.java |   18 +-
 .../condition/CompoundCondition.java            |   19 +-
 .../lib/streamquery/condition/Condition.java    |   13 +-
 .../condition/EqualValueCondition.java          |   15 +-
 .../condition/HavingCompareValue.java           |    4 +-
 .../streamquery/condition/HavingCondition.java  |    7 +-
 .../lib/streamquery/condition/InCondition.java  |   12 +-
 .../condition/JoinColumnEqualCondition.java     |    9 +-
 .../streamquery/condition/LikeCondition.java    |   14 +-
 .../streamquery/function/AverageFunction.java   |   10 +-
 .../lib/streamquery/function/CountFunction.java |   12 +-
 .../streamquery/function/FirstLastFunction.java |   24 +-
 .../lib/streamquery/function/FunctionIndex.java |   18 +-
 .../streamquery/function/MaxMinFunction.java    |   14 +-
 .../lib/streamquery/function/SumFunction.java   |    8 +-
 .../lib/streamquery/index/BinaryExpression.java |    8 +-
 .../lib/streamquery/index/ColumnIndex.java      |    4 +-
 .../lib/streamquery/index/Index.java            |    2 +-
 .../lib/streamquery/index/MidIndex.java         |   16 +-
 .../lib/streamquery/index/NegateExpression.java |    8 +-
 .../lib/streamquery/index/RoundDoubleIndex.java |   16 +-
 .../lib/streamquery/index/StringCaseIndex.java  |   10 +-
 .../lib/streamquery/index/StringLenIndex.java   |   10 +-
 .../lib/streamquery/index/SumExpression.java    |   14 +-
 .../lib/streamquery/index/UnaryExpression.java  |    6 +-
 .../lib/testbench/ArrayListTestSink.java        |    7 +-
 .../lib/testbench/CollectorTestSink.java        |   14 +-
 .../lib/testbench/CompareFilterTuples.java      |  117 +-
 .../testbench/CountAndLastTupleTestSink.java    |    4 +-
 .../lib/testbench/CountOccurance.java           |  116 +-
 .../lib/testbench/CountTestSink.java            |    3 +-
 .../lib/testbench/EventClassifier.java          |   92 +-
 .../EventClassifierNumberToHashDouble.java      |   17 +-
 .../lib/testbench/EventGenerator.java           |   27 +-
 .../lib/testbench/EventIncrementer.java         |   15 +-
 .../lib/testbench/FilterClassifier.java         |   13 +-
 .../lib/testbench/FilteredEventClassifier.java  |   13 +-
 .../datatorrent/lib/testbench/HashTestSink.java |   22 +-
 .../lib/testbench/HttpStatusFilter.java         |  105 +-
 .../datatorrent/lib/testbench/KeyValSum.java    |   72 +-
 .../lib/testbench/RandomEventGenerator.java     |   15 +-
 .../lib/testbench/RandomWordGenerator.java      |   11 +-
 .../datatorrent/lib/testbench/RedisSumOper.java |   70 +-
 .../lib/testbench/SeedEventClassifier.java      |   12 +-
 .../lib/testbench/SeedEventGenerator.java       |   13 +-
 .../datatorrent/lib/testbench/SumTestSink.java  |    2 +-
 .../lib/testbench/ThroughputCounter.java        |   19 +-
 .../lib/testbench/TopOccurrence.java            |  162 ++-
 .../lib/transform/TransformOperator.java        |    3 +-
 .../lib/util/AbstractBaseFrequentKey.java       |   15 +-
 .../util/AbstractBaseFrequentKeyValueMap.java   |   11 +-
 .../lib/util/AbstractBaseMatchOperator.java     |   25 +-
 .../util/AbstractBaseNNonUniqueOperatorMap.java |    7 +-
 .../lib/util/AbstractBaseNOperatorMap.java      |    4 +-
 .../util/AbstractBaseNUniqueOperatorMap.java    |    7 +-
 .../lib/util/AbstractBaseSortOperator.java      |   11 +-
 .../AbstractDimensionTimeBucketOperator.java    |   33 +-
 .../lib/util/AbstractKeyValueStorageAgent.java  |    2 +-
 .../lib/util/ActiveMQMessageListener.java       |   23 +-
 .../util/ActiveMQMultiTypeMessageListener.java  |   42 +-
 .../lib/util/ArrayHashMapFrequent.java          |   14 +-
 .../lib/util/BaseFilteredKeyValueOperator.java  |    2 +-
 .../lib/util/BaseKeyValueOperator.java          |    3 +-
 .../datatorrent/lib/util/BaseLineTokenizer.java |   13 +-
 .../datatorrent/lib/util/BaseMatchOperator.java |    5 +-
 .../lib/util/BaseNumberKeyValueOperator.java    |   32 +-
 .../lib/util/BaseNumberValueOperator.java       |   18 +-
 .../util/DimensionTimeBucketSumOperator.java    |    7 +-
 .../com/datatorrent/lib/util/FieldInfo.java     |   11 +-
 .../datatorrent/lib/util/FilterOperator.java    |    2 +-
 .../lib/util/JavaScriptFilterOperator.java      |   29 +-
 .../datatorrent/lib/util/KeyHashValPair.java    |    2 +-
 .../com/datatorrent/lib/util/PojoUtils.java     |  142 ++-
 .../lib/util/ReusableStringReader.java          |    6 +-
 .../lib/util/ReversibleComparator.java          |    1 +
 .../com/datatorrent/lib/util/TableInfo.java     |    2 +-
 .../com/datatorrent/lib/util/TimeBucketKey.java |   36 +-
 .../java/com/datatorrent/lib/util/TopNSort.java |    3 +-
 .../datatorrent/lib/util/TopNUniqueSort.java    |    7 +-
 .../lib/util/UnifierArrayHashMapFrequent.java   |   14 +-
 .../datatorrent/lib/util/UnifierArrayList.java  |    2 +-
 .../datatorrent/lib/util/UnifierBooleanAnd.java |    2 +-
 .../datatorrent/lib/util/UnifierBooleanOr.java  |    2 +-
 .../lib/util/UnifierCountOccurKey.java          |   28 +-
 .../datatorrent/lib/util/UnifierHashMap.java    |    6 +-
 .../lib/util/UnifierHashMapFrequent.java        |   14 +-
 .../lib/util/UnifierHashMapInteger.java         |   11 +-
 .../lib/util/UnifierHashMapRange.java           |   11 +-
 .../lib/util/UnifierHashMapSumKeys.java         |   11 +-
 .../lib/util/UnifierKeyValRange.java            |    8 +-
 .../com/datatorrent/lib/util/UnifierMap.java    |    8 +-
 .../com/datatorrent/lib/util/UnifierRange.java  |    5 +-
 .../datatorrent/lib/util/UnifierSumNumber.java  |    2 +-
 .../lib/xml/AbstractXmlDOMOperator.java         |   12 +-
 .../malhar/lib/fs/BytesFileOutputOperator.java  |    2 -
 .../lib/state/managed/ManagedTimeStateImpl.java |    5 +-
 .../hadoop/io/file/tfile/CacheManager.java      |   70 +-
 .../tfile/ReusableByteArrayInputStream.java     |    7 +-
 .../algo/AbstractStreamPatternMatcherTest.java  |    2 +-
 .../lib/algo/AllAfterMatchMapTest.java          |   12 +-
 .../datatorrent/lib/algo/BottomNMapTest.java    |   10 +-
 .../lib/algo/BottomNUnifierTest.java            |   10 +-
 .../lib/algo/BottomNUniqueMapTest.java          |   11 +-
 .../datatorrent/lib/algo/DistinctMapTest.java   |    6 +-
 .../datatorrent/lib/algo/FilterKeysMapTest.java |    2 +-
 .../datatorrent/lib/algo/FilterValuesTest.java  |    4 +-
 .../com/datatorrent/lib/algo/FirstNTest.java    |   10 +-
 .../lib/algo/InsertSortDescTest.java            |    8 +-
 .../datatorrent/lib/algo/InsertSortTest.java    |    2 +-
 .../lib/algo/InvertIndexArrayTest.java          |   10 +-
 .../datatorrent/lib/algo/InvertIndexTest.java   |   10 +-
 .../lib/algo/LeastFrequentKeyMapTest.java       |    5 +-
 .../lib/algo/LeastFrequentKeyTest.java          |   15 +-
 .../lib/algo/LeastFrequentKeyValueMapTest.java  |    8 +-
 .../datatorrent/lib/algo/MatchAllMapTest.java   |    4 +-
 .../datatorrent/lib/algo/MatchAnyMapTest.java   |    2 +-
 .../com/datatorrent/lib/algo/MatchMapTest.java  |    8 +-
 .../lib/algo/MergeSortNumberTest.java           |   60 +-
 .../lib/algo/MostFrequentKeyMapTest.java        |    5 +-
 .../lib/algo/MostFrequentKeyTest.java           |   15 +-
 .../lib/algo/MostFrequentKeyValueMapTest.java   |    8 +-
 .../java/com/datatorrent/lib/algo/TopNTest.java |   10 +-
 .../datatorrent/lib/algo/TopNUniqueTest.java    |    8 +-
 .../datatorrent/lib/algo/UniqueCounterTest.java |   10 +-
 .../lib/algo/UniqueValueCountTest.java          |   85 +-
 .../CacheLRUSynchronousFlushTest.java           |   11 +-
 .../datastructs/DimensionalTableTest.java       |   18 +-
 .../lib/appdata/gpo/GPOMutableTest.java         |    4 +-
 .../lib/appdata/gpo/GPOUtilsTest.java           |   48 +-
 .../appdata/gpo/SerdeFieldsDescriptorTest.java  |    7 +-
 .../appdata/gpo/SerdeListGPOMutableTest.java    |    9 +-
 .../lib/appdata/gpo/SerdeListPrimitiveTest.java |   17 +-
 .../lib/appdata/gpo/SerdeListStringTest.java    |    6 +-
 .../lib/appdata/query/MockQuery.java            |   14 +-
 .../lib/appdata/query/MockResult.java           |   10 +-
 .../lib/appdata/query/MockResultSerializer.java |    5 +-
 .../query/QueryManagerAsynchronousTest.java     |   58 +-
 .../query/QueryManagerSynchronousTest.java      |   17 +-
 .../lib/appdata/query/QueueUtilsTest.java       |    4 +-
 .../query/SimpleDoneQueryQueueManagerTest.java  |   10 +-
 .../appdata/query/WEQueryQueueManagerTest.java  |   54 +-
 .../appdata/query/WindowBoundedServiceTest.java |    6 +-
 .../serde/MessageDeserializerFactoryTest.java   |    6 +-
 .../DataQuerySnapshotDeserializerTest.java      |   15 +-
 .../appdata/schemas/DataQuerySnapshotTest.java  |   10 +-
 .../DataResultSnapshotSerializerTest.java       |   25 +-
 .../DimensionalConfigurationSchemaTest.java     |  190 ++-
 .../appdata/schemas/DimensionalSchemaTest.java  |  184 ++-
 .../appdata/schemas/FieldsDescriptorTest.java   |   30 +-
 .../appdata/schemas/ResultFormatterTest.java    |   16 +-
 .../lib/appdata/schemas/SchemaQueryTest.java    |   42 +-
 .../schemas/SchemaRegistryMultipleTest.java     |   12 +-
 .../lib/appdata/schemas/SchemaTestUtils.java    |    8 +-
 .../lib/appdata/schemas/SchemaUtilsTest.java    |    8 +-
 .../lib/appdata/schemas/SnapshotSchemaTest.java |   14 +-
 .../snapshot/AppDataSnapshotServerMapTest.java  |   34 +-
 .../snapshot/AppDataSnapshotServerPojoTest.java |    6 +-
 .../codec/JavaSerializationStreamCodecTest.java |  196 +--
 .../ByteArrayToStringConverterTest.java         |    7 +-
 .../MapToKeyValuePairConverterTest.java         |   11 +-
 .../StringValueToNumberConverterForMapTest.java |   30 +-
 .../lib/counters/BasicCountersTest.java         |    7 +-
 .../lib/db/KeyValueStoreOperatorTest.java       |   21 +-
 ...ransactionableKeyValueStoreOperatorTest.java |    5 +-
 ...NonTransactionalBatchOutputOperatorTest.java |  299 ++---
 .../JdbcNonTransactionalOutputOperatorTest.java |   16 +-
 .../db/jdbc/JdbcNonTransactionalStoreTest.java  |   12 +-
 .../lib/formatter/JsonFormatterTest.java        |   12 +-
 .../lib/formatter/XmlFormatterTest.java         |   10 +-
 .../lib/helper/OperatorContextTestHelper.java   |    2 +-
 .../helper/SamplePubSubWebSocketServlet.java    |   12 +-
 .../lib/io/ApacheRandomLogsTest.java            |   63 +-
 .../lib/io/HttpJsonChunksInputOperatorTest.java |    8 +-
 .../lib/io/HttpLinesInputOperatorTest.java      |    4 +-
 .../io/HttpMultiValuedMapGetOperatorTest.java   |    6 +-
 .../lib/io/HttpPostOutputOperatorTest.java      |    6 +-
 .../lib/io/IdempotentStorageManagerTest.java    |   11 +-
 .../io/PubSubWebSocketAppDataOperatorTest.java  |    3 +-
 .../lib/io/PubSubWebSocketAppDataQueryTest.java |    4 +-
 .../lib/io/PubSubWebSocketOperatorTest.java     |    6 +-
 .../lib/io/SmtpOutputOperatorTest.java          |   17 +-
 .../lib/io/SocketInputOperatorTest.java         |   17 +-
 .../io/WebSocketServerInputOperatorTest.java    |   10 +-
 ...actFileInputOperatorFailureHandlingTest.java |   50 +-
 .../io/fs/AbstractFileInputOperatorTest.java    |   94 +-
 .../io/fs/AbstractFileOutputOperatorTest.java   |  675 +++-------
 .../lib/io/fs/AbstractReconcilerTest.java       |    6 +-
 .../AbstractSingleFileOutputOperatorTest.java   |   57 +-
 .../AbstractWindowFileOutputOperatorTest.java   |   46 +-
 .../lib/io/fs/FSInputModuleAppTest.java         |    2 +-
 .../lib/io/fs/FileSplitterInputTest.java        |    9 +-
 .../lib/io/fs/TailFsInputOperatorTest.java      |    6 +-
 .../io/jms/JMSMultiPortOutputOperatorTest.java  |   21 +-
 .../lib/io/jms/JMSObjectInputOperatorTest.java  |   39 +-
 .../lib/io/jms/JMSOutputOperatorTest.java       |  345 ++---
 .../com/datatorrent/lib/io/jms/JMSTestBase.java |    7 +-
 .../io/jms/JMSTransactionableStoreTestBase.java |   51 +-
 .../lib/join/MapTimeBasedJoinOperator.java      |    4 +-
 .../lib/join/POJOTimeBasedJoinOperatorTest.java |   12 +-
 .../lib/logs/ApacheLogParseOperatorTest.java    |  100 +-
 .../logs/FilteredLineToTokenArrayListTest.java  |    9 +-
 .../logs/FilteredLineToTokenHashMapTest.java    |  102 +-
 .../logs/FilteredLineTokenizerKeyValTest.java   |   11 +-
 .../lib/logs/LineToTokenArrayListTest.java      |  126 +-
 .../lib/logs/LineToTokenHashMapTest.java        |  106 +-
 .../lib/logs/LineTokenizerKeyValTest.java       |   84 +-
 .../MultiWindowDimensionAggregationTest.java    |    7 +-
 .../lib/logs/RegexMatchMapOperatorTest.java     |   25 +-
 .../lib/logs/TopNUniqueSiteStatsTest.java       |   94 +-
 .../datatorrent/lib/math/AverageKeyValTest.java |   93 +-
 .../com/datatorrent/lib/math/AverageTest.java   |  120 +-
 .../lib/math/ChangeAlertKeyValTest.java         |  116 +-
 .../lib/math/ChangeAlertMapTest.java            |  134 +-
 .../datatorrent/lib/math/ChangeKeyValTest.java  |  128 +-
 .../com/datatorrent/lib/math/ChangeTest.java    |   82 +-
 .../lib/math/CompareExceptMapTest.java          |   28 +-
 .../datatorrent/lib/math/CompareMapTest.java    |    8 +-
 .../datatorrent/lib/math/CountKeyValTest.java   |   14 +-
 .../lib/math/CountOccuranceTest.java            |   14 +-
 .../com/datatorrent/lib/math/DivisionTest.java  |  123 +-
 .../com/datatorrent/lib/math/ExceptMapTest.java |   16 +-
 .../lib/math/LogicalCompareTest.java            |  120 +-
 .../lib/math/LogicalCompareToConstantTest.java  |  116 +-
 .../datatorrent/lib/math/MarginKeyValTest.java  |   92 +-
 .../com/datatorrent/lib/math/MarginMapTest.java |   96 +-
 .../com/datatorrent/lib/math/MarginTest.java    |    2 +-
 .../com/datatorrent/lib/math/MaxKeyValTest.java |   28 +-
 .../java/com/datatorrent/lib/math/MaxTest.java  |    6 +-
 .../com/datatorrent/lib/math/MinKeyValTest.java |  188 ++-
 .../java/com/datatorrent/lib/math/MinTest.java  |   10 +-
 .../lib/math/MultiplyByConstantTest.java        |    2 +-
 .../datatorrent/lib/math/QuotientMapTest.java   |  100 +-
 .../com/datatorrent/lib/math/QuotientTest.java  |  117 +-
 .../datatorrent/lib/math/RangeKeyValTest.java   |   30 +-
 .../com/datatorrent/lib/math/RangeTest.java     |  152 +--
 .../lib/math/RunningAverageTest.java            |   58 +-
 .../com/datatorrent/lib/math/SigmaTest.java     |   68 +-
 .../lib/math/SquareCalculusTest.java            |   58 +-
 .../datatorrent/lib/math/SumCountMapTest.java   |  227 ++--
 .../com/datatorrent/lib/math/SumKeyValTest.java |   14 +-
 .../java/com/datatorrent/lib/math/SumTest.java  |  124 +-
 .../XmlKeyValueStringCartesianProductTest.java  |    5 +-
 .../multiwindow/MultiWindowRangeKeyValTest.java |   54 +-
 .../multiwindow/MultiWindowSumKeyValTest.java   |   54 +-
 .../multiwindow/SimpleMovingAverageTest.java    |   20 +-
 .../lib/multiwindow/SlidingWindowTest.java      |  110 +-
 .../lib/multiwindow/SortedMovingWindowTest.java |   55 +-
 .../partitioner/StatelessPartitionerTest.java   |   21 +-
 ...StatelessThroughputBasedPartitionerTest.java |   12 +-
 .../lib/script/JavaScriptOperatorTest.java      |    2 +-
 .../lib/statistics/MeridianOperatorTest.java    |    6 +-
 .../lib/statistics/ModeOperatorTest.java        |    6 +-
 .../statistics/WeightedMeanOperatorTest.java    |    6 +-
 .../lib/stream/ArrayListAggregatorTest.java     |   53 +-
 .../lib/stream/ArrayListToItemTest.java         |   46 +-
 .../lib/stream/ConsolidatorKeyValTest.java      |   36 +-
 .../com/datatorrent/lib/stream/CounterTest.java |   57 +-
 .../lib/stream/DevNullCounterTest.java          |   48 +-
 .../com/datatorrent/lib/stream/DevNullTest.java |   17 +-
 .../lib/stream/HashMapToKeyValPairTest.java     |   54 +-
 .../lib/stream/JsonByteArrayOperatorTest.java   |  139 +--
 .../lib/stream/KeyPairToHashMapTest.java        |   48 +-
 .../lib/stream/RoundRobinHashMapTest.java       |   96 +-
 .../lib/stream/StreamDuplicaterTest.java        |   59 +-
 .../lib/stream/StreamMergerTest.java            |    4 +-
 .../lib/streamquery/DeleteOperatorTest.java     |   79 +-
 .../streamquery/FullOuterJoinOperatorTest.java  |   97 +-
 .../lib/streamquery/GroupByOperatorTest.java    |   83 +-
 .../lib/streamquery/HavingOperatorTest.java     |   83 +-
 .../lib/streamquery/InnerJoinOperatorTest.java  |   97 +-
 .../streamquery/LeftOuterJoinOperatorTest.java  |   95 +-
 .../lib/streamquery/OrderByOperatorTest.java    |  109 +-
 .../streamquery/RightOuterJoinOperatorTest.java |   89 +-
 .../lib/streamquery/SelectOperatorTest.java     |   84 +-
 .../lib/streamquery/SelectTopOperatorTest.java  |    9 +-
 .../lib/streamquery/UpdateOperatorTest.java     |   82 +-
 .../advanced/BetweenConditionTest.java          |   84 +-
 .../advanced/CompoundConditionTest.java         |   90 +-
 .../streamquery/advanced/InConditionTest.java   |   83 +-
 .../streamquery/advanced/LikeConditionTest.java |   81 +-
 .../streamquery/advanced/NegateIndexTest.java   |   72 +-
 .../streamquery/advanced/SelectAverageTest.java |   72 +-
 .../streamquery/advanced/SelectCountTest.java   |   73 +-
 .../advanced/SelectFirstLastTest.java           |   73 +-
 .../streamquery/advanced/SelectMaxMinTest.java  |   73 +-
 .../lib/streamquery/advanced/SumIndexTest.java  |   73 +-
 .../lib/testbench/ActiveMQMessageGenerator.java |  213 ++--
 .../lib/testbench/EventClassifierTest.java      |  348 +++---
 .../lib/testbench/EventGeneratorTest.java       |   36 +-
 .../lib/testbench/EventIncrementerTest.java     |   35 +-
 .../testbench/FilteredEventClassifierTest.java  |   61 +-
 .../lib/testbench/RandomEventGeneratorTest.java |    8 +-
 .../lib/testbench/SeedEventClassifierTest.java  |   19 +-
 .../lib/testbench/SeedEventGeneratorTest.java   |    7 +-
 .../lib/testbench/ThroughputCounterTest.java    |   29 +-
 .../lib/transform/TransformOperatorAppTest.java |   18 +-
 .../DimensionTimeBucketSumOperatorTest.java     |   11 +-
 .../lib/util/JavaScriptFilterOperatorTest.java  |    4 +-
 .../lib/util/KryoCloneUtilsTest.java            |    7 +-
 .../com/datatorrent/lib/util/PojoUtilsTest.java |   65 +-
 .../com/datatorrent/lib/util/TestUtils.java     |    6 +-
 .../CustomTimeBucketRegistryTest.java           |    2 -
 .../dimensions/DimensionsDescriptorTest.java    |   18 +-
 .../state/managed/ManagedStateTestUtils.java    |    3 -
 .../apache/hadoop/io/file/tfile/DTFileTest.java |   65 +-
 .../apache/hadoop/io/file/tfile/TestDTFile.java |  108 +-
 .../io/file/tfile/TestDTFileByteArrays.java     |  255 ++--
 537 files changed, 11424 insertions(+), 12366 deletions(-)
----------------------------------------------------------------------



[17/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptor.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptor.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptor.java
index 0bc4712..ec00fda 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptor.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptor.java
@@ -18,9 +18,7 @@
  */
 package com.datatorrent.lib.appdata.schemas;
 
-import it.unimi.dsi.fastutil.objects.Object2IntLinkedOpenHashMap;
 import java.io.Serializable;
-
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -28,17 +26,19 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.datatorrent.lib.appdata.gpo.Serde;
 import com.datatorrent.lib.appdata.gpo.SerdeObjectPayloadFix;
 
+import it.unimi.dsi.fastutil.objects.Object2IntLinkedOpenHashMap;
+
 /**
  * This class manages the storage of fields in app data. It is used in {@link GPOMutable} objects
  * to map field names to values in order to respond to queries, it also serves as a schema which is
@@ -118,8 +118,7 @@ public class FieldsDescriptor implements Serializable
    * @param fieldToType A mapping from field names to the type of the field.
    * @param fieldToSerdeObject A mapping from field names to the corresponding serde object.
    */
-  public FieldsDescriptor(Map<String, Type> fieldToType,
-                          Map<String, Serde> fieldToSerdeObject)
+  public FieldsDescriptor(Map<String, Type> fieldToType, Map<String, Serde> fieldToSerdeObject)
   {
     setFieldToType(fieldToType);
     compressedTypes = Sets.newHashSet();
@@ -128,23 +127,17 @@ public class FieldsDescriptor implements Serializable
 
     List<String> fieldNames = typeToFields.get(Type.OBJECT);
 
-    if(fieldNames == null) {
-      throw new IllegalArgumentException("There are no fields of type " + Type.OBJECT +
-                                         " in this fieldsdescriptor");
-    }
-    else {
+    if (fieldNames == null) {
+      throw new IllegalArgumentException("There are no fields of type " + Type.OBJECT + " in this fieldsdescriptor");
+    } else {
       serdes = new Serde[fieldNames.size()];
 
       //Insert serdes in corresponding order
-      for(int index = 0;
-          index < fieldNames.size();
-          index++) {
+      for (int index = 0; index < fieldNames.size(); index++) {
         String fieldName = fieldNames.get(index);
         Serde serdeObject = fieldToSerdeObject.get(fieldName);
-        if(serdeObject == null) {
-          throw new IllegalArgumentException("The field "
-                                             + fieldName
-                                             + " doesn't have a serde object.");
+        if (serdeObject == null) {
+          throw new IllegalArgumentException("The field " + fieldName + " doesn't have a serde object.");
         }
 
         serdes[index] = serdeObject;
@@ -152,9 +145,8 @@ public class FieldsDescriptor implements Serializable
     }
   }
 
-  public FieldsDescriptor(Map<String, Type> fieldToType,
-                          Map<String, Serde> fieldToSerdeObject,
-                          SerdeObjectPayloadFix serdePayloadFix)
+  public FieldsDescriptor(Map<String, Type> fieldToType, Map<String, Serde> fieldToSerdeObject,
+      SerdeObjectPayloadFix serdePayloadFix)
   {
     this(fieldToType, fieldToSerdeObject);
     this.serdePayloadFix = serdePayloadFix;
@@ -185,13 +177,13 @@ public class FieldsDescriptor implements Serializable
     typeToFieldToIndex = Maps.newHashMap();
     typeToFields = Maps.newHashMap();
 
-    for(Map.Entry<String, Type> entry: fieldToType.entrySet()) {
+    for (Map.Entry<String, Type> entry : fieldToType.entrySet()) {
       String field = entry.getKey();
       Type type = entry.getValue();
 
       List<String> fieldsList = typeToFields.get(type);
 
-      if(fieldsList == null) {
+      if (fieldsList == null) {
         fieldsList = Lists.newArrayList();
         typeToFields.put(type, fieldsList);
       }
@@ -200,22 +192,19 @@ public class FieldsDescriptor implements Serializable
     }
 
     //ensure consistent ordering of fields
-    for(Map.Entry<Type, List<String>> entry: typeToFields.entrySet()) {
+    for (Map.Entry<Type, List<String>> entry : typeToFields.entrySet()) {
       Type type = entry.getKey();
       List<String> tempFields = entry.getValue();
 
       Collections.sort(tempFields);
       Object2IntLinkedOpenHashMap<String> fieldToIndex = new Object2IntLinkedOpenHashMap<String>();
 
-      for(int index = 0;
-          index < tempFields.size();
-          index++) {
+      for (int index = 0; index < tempFields.size(); index++) {
         String field = tempFields.get(index);
 
-        if(compressedTypes.contains(type)) {
+        if (compressedTypes.contains(type)) {
           fieldToIndex.put(field, 0);
-        }
-        else {
+        } else {
           fieldToIndex.put(field, index);
         }
       }
@@ -225,10 +214,9 @@ public class FieldsDescriptor implements Serializable
 
     //Types
 
-    if(!typeToFields.isEmpty()) {
+    if (!typeToFields.isEmpty()) {
       types = EnumSet.copyOf(typeToFields.keySet());
-    }
-    else {
+    } else {
       types = Sets.newHashSet();
     }
 
@@ -245,13 +233,12 @@ public class FieldsDescriptor implements Serializable
     //Array Sizes
     typeToSize = new Object2IntLinkedOpenHashMap<Type>();
 
-    for(Map.Entry<Type, List<String>> entry: typeToFields.entrySet()) {
+    for (Map.Entry<Type, List<String>> entry : typeToFields.entrySet()) {
       Type type = entry.getKey();
 
-      if(compressedTypes.contains(type)) {
+      if (compressedTypes.contains(type)) {
         getTypeToSize().put(type, 1);
-      }
-      else {
+      } else {
         getTypeToSize().put(type, entry.getValue().size());
       }
     }
@@ -272,8 +259,9 @@ public class FieldsDescriptor implements Serializable
    * @param fieldToType The field to type map to set for this {@link FieldsDescriptor}
    * object.
    */
-  private void setFieldToType(Map<String, Type> fieldToType) {
-    for(Map.Entry<String, Type> entry: fieldToType.entrySet()) {
+  private void setFieldToType(Map<String, Type> fieldToType)
+  {
+    for (Map.Entry<String, Type> entry : fieldToType.entrySet()) {
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
     }
@@ -287,7 +275,7 @@ public class FieldsDescriptor implements Serializable
    */
   private void setCompressedTypes(Set<Type> compressedTypes)
   {
-    for(Type type: compressedTypes) {
+    for (Type type : compressedTypes) {
       Preconditions.checkNotNull(type);
     }
 
@@ -319,7 +307,7 @@ public class FieldsDescriptor implements Serializable
    */
   public Fields getFields()
   {
-    if(fields == null) {
+    if (fields == null) {
       fields = new Fields(fieldToType.keySet());
     }
 
@@ -336,7 +324,7 @@ public class FieldsDescriptor implements Serializable
   {
     Map<String, Type> newFieldToType = Maps.newHashMap();
 
-    for(String field: fields.getFields()) {
+    for (String field : fields.getFields()) {
       Type type = fieldToType.get(field);
       newFieldToType.put(field, type);
     }
@@ -426,17 +414,19 @@ public class FieldsDescriptor implements Serializable
   @Override
   public boolean equals(Object obj)
   {
-    if(obj == null) {
+    if (obj == null) {
       return false;
     }
-    if(getClass() != obj.getClass()) {
+    if (getClass() != obj.getClass()) {
       return false;
     }
     final FieldsDescriptor other = (FieldsDescriptor)obj;
-    if(this.fieldToType != other.fieldToType && (this.fieldToType == null || !this.fieldToType.equals(other.fieldToType))) {
+    if (this.fieldToType != other.fieldToType && (this.fieldToType == null || !this.fieldToType.equals(
+        other.fieldToType))) {
       return false;
     }
-    if(this.compressedTypes != other.compressedTypes && (this.compressedTypes == null || !this.compressedTypes.equals(other.compressedTypes))) {
+    if (this.compressedTypes != other.compressedTypes && (this.compressedTypes == null || !this.compressedTypes.equals(
+        other.compressedTypes))) {
       return false;
     }
     return true;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/QRBase.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/QRBase.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/QRBase.java
index 6d97e9a..cbeaf2e 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/QRBase.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/QRBase.java
@@ -80,8 +80,7 @@ public abstract class QRBase extends Message
    * @param id The query id.
    * @param type The type of the query.
    */
-  public QRBase(String id,
-                String type)
+  public QRBase(String id, String type)
   {
     super(type);
     this.id = Preconditions.checkNotNull(id);
@@ -93,9 +92,7 @@ public abstract class QRBase extends Message
    * @param type The type of the query.
    * @param countdown The countdown for the query.
    */
-  public QRBase(String id,
-                String type,
-                long countdown)
+  public QRBase(String id, String type, long countdown)
   {
     this(id, type);
     setCountdown(countdown);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/Query.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Query.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Query.java
index 835c599..fc88886 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Query.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Query.java
@@ -62,20 +62,18 @@ public abstract class Query extends QRBase
    * @param id The query id.
    * @param type The type of the query.
    */
-  public Query(String id,
-               String type)
+  public Query(String id, String type)
   {
     super(id, type);
   }
+
   /**
    * Creates a query with the given id, type, and schemaKeys.
    * @param id The query id.
    * @param type The type of the query.
    * @param schemaKeys The schemaKeys for the query.
    */
-  public Query(String id,
-               String type,
-               Map<String, String> schemaKeys)
+  public Query(String id, String type, Map<String, String> schemaKeys)
   {
     super(id, type);
     setSchemaKeys(schemaKeys);
@@ -87,9 +85,7 @@ public abstract class Query extends QRBase
    * @param type The type of the query.
    * @param countdown The countdown for the query.
    */
-  public Query(String id,
-               String type,
-               long countdown)
+  public Query(String id, String type, long countdown)
   {
     super(id, type, countdown);
   }
@@ -101,10 +97,7 @@ public abstract class Query extends QRBase
    * @param countdown The countdown for the query.
    * @param schemaKeys The schemaKeys for the query.
    */
-  public Query(String id,
-               String type,
-               long countdown,
-               Map<String, String> schemaKeys)
+  public Query(String id, String type, long countdown, Map<String, String> schemaKeys)
   {
     super(id, type, countdown);
     setSchemaKeys(schemaKeys);
@@ -116,11 +109,11 @@ public abstract class Query extends QRBase
    */
   private void setSchemaKeys(Map<String, String> schemaKeys)
   {
-    if(schemaKeys == null) {
+    if (schemaKeys == null) {
       return;
     }
 
-    for(Map.Entry<String, String> entry: schemaKeys.entrySet()) {
+    for (Map.Entry<String, String> entry : schemaKeys.entrySet()) {
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/Result.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Result.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Result.java
index 0841e13..ea08ce0 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Result.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Result.java
@@ -53,8 +53,7 @@ public abstract class Result extends QRBase
    * @param query The query that this result is a response to.
    * @param countdown The countdown for this result.
    */
-  public Result(Query query,
-                long countdown)
+  public Result(Query query, long countdown)
   {
     super(query.getId());
     setQuery(query);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/ResultFormatter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/ResultFormatter.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/ResultFormatter.java
index 44a9c8c..1cd6712 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/ResultFormatter.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/ResultFormatter.java
@@ -114,34 +114,28 @@ public class ResultFormatter implements Serializable
   {
     Type type = Type.CLASS_TO_TYPE.get(object.getClass());
 
-    if(type == null) {
+    if (type == null) {
       return object.toString();
     }
 
-    switch(type) {
-      case FLOAT:
-      {
-        return format((float) ((Float) object));
+    switch (type) {
+      case FLOAT: {
+        return format((float)((Float)object));
       }
-      case DOUBLE:
-      {
-        return format((double) ((Double) object));
+      case DOUBLE: {
+        return format((double)((Double)object));
       }
-      case BYTE:
-      {
-        return format((byte) ((Byte) object));
+      case BYTE: {
+        return format((byte)((Byte)object));
       }
-      case SHORT:
-      {
-        return format((short) ((Short) object));
+      case SHORT: {
+        return format((short)((Short)object));
       }
-      case INTEGER:
-      {
-        return format((int) ((Integer) object));
+      case INTEGER: {
+        return format((int)((Integer)object));
       }
-      case LONG:
-      {
-        return format((long) ((Long) object));
+      case LONG: {
+        return format((long)((Long)object));
       }
       default:
         return object.toString();
@@ -157,7 +151,7 @@ public class ResultFormatter implements Serializable
   {
     DecimalFormat df = getFloatFormat();
 
-    if(df != null) {
+    if (df != null) {
       return df.format(val);
     }
 
@@ -173,7 +167,7 @@ public class ResultFormatter implements Serializable
   {
     DecimalFormat df = getDoubleFormat();
 
-    if(df != null) {
+    if (df != null) {
       return df.format(val);
     }
 
@@ -189,7 +183,7 @@ public class ResultFormatter implements Serializable
   {
     DecimalFormat df = getByteFormat();
 
-    if(df != null) {
+    if (df != null) {
       return df.format(val);
     }
 
@@ -205,7 +199,7 @@ public class ResultFormatter implements Serializable
   {
     DecimalFormat df = getShortFormat();
 
-    if(df != null) {
+    if (df != null) {
       return df.format(val);
     }
 
@@ -221,7 +215,7 @@ public class ResultFormatter implements Serializable
   {
     DecimalFormat df = getIntFormat();
 
-    if(df != null) {
+    if (df != null) {
       return df.format(val);
     }
 
@@ -237,7 +231,7 @@ public class ResultFormatter implements Serializable
   {
     DecimalFormat df = getLongFormat();
 
-    if(df != null) {
+    if (df != null) {
       return df.format(val);
     }
 
@@ -250,7 +244,7 @@ public class ResultFormatter implements Serializable
    */
   public DecimalFormat getFloatFormat()
   {
-    if(floatFormat == null && floatFormatString != null) {
+    if (floatFormat == null && floatFormatString != null) {
       floatFormat = new DecimalFormat(floatFormatString);
     }
 
@@ -263,7 +257,7 @@ public class ResultFormatter implements Serializable
    */
   public DecimalFormat getDoubleFormat()
   {
-    if(doubleFormat == null && doubleFormatString != null) {
+    if (doubleFormat == null && doubleFormatString != null) {
       doubleFormat = new DecimalFormat(doubleFormatString);
     }
 
@@ -276,7 +270,7 @@ public class ResultFormatter implements Serializable
    */
   public DecimalFormat getByteFormat()
   {
-    if(byteFormat == null && byteFormatString != null) {
+    if (byteFormat == null && byteFormatString != null) {
       byteFormat = new DecimalFormat(byteFormatString);
     }
 
@@ -289,7 +283,7 @@ public class ResultFormatter implements Serializable
    */
   public DecimalFormat getShortFormat()
   {
-    if(shortFormat == null && shortFormatString != null) {
+    if (shortFormat == null && shortFormatString != null) {
       shortFormat = new DecimalFormat(shortFormatString);
     }
 
@@ -302,7 +296,7 @@ public class ResultFormatter implements Serializable
    */
   public DecimalFormat getIntFormat()
   {
-    if(intFormat == null && intFormatString != null) {
+    if (intFormat == null && intFormatString != null) {
       intFormat = new DecimalFormat(intFormatString);
     }
 
@@ -315,7 +309,7 @@ public class ResultFormatter implements Serializable
    */
   public DecimalFormat getLongFormat()
   {
-    if(longFormat == null && longFormatString != null) {
+    if (longFormat == null && longFormatString != null) {
       longFormat = new DecimalFormat(longFormatString);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
index e2e21da..8260c81 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
@@ -41,26 +41,31 @@ public interface Schema
    * @return The id of the schema.
    */
   public int getSchemaID();
+
   /**
    * Gets the type of the schema (e.x. point, dimensions).
    * @return The type of the schema.
    */
   public String getSchemaType();
+
   /**
    * Gets the version of the schema.
    * @return The version of the schema.
    */
   public String getSchemaVersion();
+
   /**
    * Gets the AppData json to serve in response to a schema query.
    * @return The AppData json to serve in response to a schema query.
    */
   public String getSchemaJSON();
+
   /**
    * Gets the schema keys which are used to send queries targeted to this schema.
    * @return The schema keys which are used to send queries targeted to this schema.
    */
   public Map<String, String> getSchemaKeys();
+
   /**
    * Sets the schema keys for this schema.
    * @param schemaKeys The schema keys for this schema.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaQuery.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaQuery.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaQuery.java
index 8b9cfe0..a3fd69d 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaQuery.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaQuery.java
@@ -33,9 +33,9 @@ import com.datatorrent.lib.appdata.query.serde.SimpleDataValidator;
  * This class represents a schema query.
  * @since 3.0.0
  */
-@MessageType(type=SchemaQuery.TYPE)
-@MessageDeserializerInfo(clazz=SchemaQueryDeserializer.class)
-@MessageValidatorInfo(clazz=SimpleDataValidator.class)
+@MessageType(type = SchemaQuery.TYPE)
+@MessageDeserializerInfo(clazz = SchemaQueryDeserializer.class)
+@MessageValidatorInfo(clazz = SimpleDataValidator.class)
 public class SchemaQuery extends Query
 {
   public static final String FIELD_CONTEXT = "context";
@@ -70,15 +70,12 @@ public class SchemaQuery extends Query
    * @param id The id of the query.
    * @param schemaKeys The schema keys for the requested schema.
    */
-  public SchemaQuery(String id,
-                     Map<String, String> schemaKeys)
+  public SchemaQuery(String id, Map<String, String> schemaKeys)
   {
     super(id, TYPE, schemaKeys);
   }
 
-  public SchemaQuery(String id,
-                     Map<String, String> schemaKeys,
-                     Map<String, String> contextKeys)
+  public SchemaQuery(String id, Map<String, String> schemaKeys, Map<String, String> contextKeys)
   {
     super(id, TYPE);
     this.schemaKeys = schemaKeys;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistry.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistry.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistry.java
index 93e8bf2..374e16c 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistry.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistry.java
@@ -34,17 +34,20 @@ public interface SchemaRegistry
    * @return The schema result.
    */
   public SchemaResult getSchemaResult(SchemaQuery schemaQuery);
+
   /**
    * Registers the given schema with this schema registry.
    * @param schema The schema to register with this registry.
    */
   public void registerSchema(Schema schema);
+
   /**
    * Registers the given schema with the given schema keys.
    * @param schema The schema to register.
    * @param schemaKeys The schema keys that correspond with the given schema.
    */
   public void registerSchema(Schema schema, Map<String, String> schemaKeys);
+
   /**
    * Gets the schema corresponding to the given schema keys.
    * @param schemaKeys The schema keys for a schema.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultiple.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultiple.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultiple.java
index ee63af8..eaabcbf 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultiple.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultiple.java
@@ -18,14 +18,14 @@
  */
 package com.datatorrent.lib.appdata.schemas;
 
-import com.google.common.base.Preconditions;
 import java.io.Serializable;
-
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 
+import com.google.common.base.Preconditions;
+
 import com.datatorrent.lib.appdata.datastructs.DimensionalTable;
 
 /**
@@ -66,8 +66,7 @@ public class SchemaRegistryMultiple implements SchemaRegistry, Serializable
    * @param schemaComparator The comparator used to order the schemas returned in the {@link SchemaResult} produced
    * by {@link SchemaRegistryMultiple#getSchemaResult(com.datatorrent.lib.appdata.schemas.SchemaQuery)}
    */
-  public SchemaRegistryMultiple(List<String> schemaKeys,
-                                Comparator<Schema> schemaComparator)
+  public SchemaRegistryMultiple(List<String> schemaKeys, Comparator<Schema> schemaComparator)
   {
     this(schemaKeys);
     this.schemaComparator = Preconditions.checkNotNull(schemaComparator);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistrySingle.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistrySingle.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistrySingle.java
index 8770fb2..12c5434 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistrySingle.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaRegistrySingle.java
@@ -19,14 +19,13 @@
 package com.datatorrent.lib.appdata.schemas;
 
 import java.io.Serializable;
-
 import java.util.Map;
 
-import com.google.common.base.Preconditions;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
 /**
  * This schema registry holds a single schema. It is intended to be used in operators
  * which serve a single schema.
@@ -61,10 +60,8 @@ public class SchemaRegistrySingle implements SchemaRegistry, Serializable
   private void setSchema(Schema schema)
   {
     Preconditions.checkNotNull(schema);
-    Preconditions.checkArgument(schema.getSchemaKeys() == null,
-                                "The provided schema should not have schema keys "
-                                + schema.getSchemaKeys()
-                                + " since they will never be used.");
+    Preconditions.checkArgument(schema.getSchemaKeys() == null, "The provided schema should not have schema keys "
+        + schema.getSchemaKeys() + " since they will never be used.");
 
     this.schema = schema;
   }
@@ -74,8 +71,9 @@ public class SchemaRegistrySingle implements SchemaRegistry, Serializable
   {
     Preconditions.checkNotNull(schemaQuery, "This should never happen.");
 
-    if(schemaQuery.getSchemaKeys() != null) {
-      logger.error("Schema keys in the given query don't apply for single schema registry: schemaKeys={}", schemaQuery.getSchemaKeys());
+    if (schemaQuery.getSchemaKeys() != null) {
+      logger.error("Schema keys in the given query don't apply for single schema registry: schemaKeys={}",
+          schemaQuery.getSchemaKeys());
       return null;
     }
 
@@ -104,6 +102,6 @@ public class SchemaRegistrySingle implements SchemaRegistry, Serializable
   @Override
   public int size()
   {
-    return schema == null ? 0: 1;
+    return schema == null ? 0 : 1;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResult.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResult.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResult.java
index 616dce9..b7cc6bc 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResult.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResult.java
@@ -30,8 +30,8 @@ import com.datatorrent.lib.appdata.query.serde.MessageType;
  * as a result to a {@link SchemaQuery}.
  * @since 3.0.0
  */
-@MessageType(type=SchemaResult.TYPE)
-@MessageSerializerInfo(clazz=SchemaResultSerializer.class)
+@MessageType(type = SchemaResult.TYPE)
+@MessageSerializerInfo(clazz = SchemaResultSerializer.class)
 public class SchemaResult extends Result
 {
   /**
@@ -50,8 +50,7 @@ public class SchemaResult extends Result
    * @param schemaQuery
    * @param genericSchemas
    */
-  public SchemaResult(SchemaQuery schemaQuery,
-                      Schema... genericSchemas)
+  public SchemaResult(SchemaQuery schemaQuery, Schema... genericSchemas)
   {
     super(schemaQuery);
     setGenericSchemas(genericSchemas);
@@ -63,8 +62,7 @@ public class SchemaResult extends Result
    * @param schemaQuery The schema query which this schema result will be a response to.
    * @param genericSchemas The schemas to return in the schema result payload.
    */
-  public SchemaResult(SchemaQuery schemaQuery,
-                      List<Schema> genericSchemas)
+  public SchemaResult(SchemaQuery schemaQuery, List<Schema> genericSchemas)
   {
     super(schemaQuery);
     setGenericSchemas(genericSchemas);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResultSerializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResultSerializer.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResultSerializer.java
index 133affd..d9fd92c 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResultSerializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaResultSerializer.java
@@ -43,14 +43,11 @@ public class SchemaResultSerializer implements CustomMessageSerializer
   @Override
   public String serialize(Message message, ResultFormatter resultFormatter)
   {
-    if(!(message instanceof SchemaResult))
-    {
-      throw new IllegalArgumentException("Must receive a "
-                                         + SchemaResult.class
-                                         + " object.");
+    if (!(message instanceof SchemaResult)) {
+      throw new IllegalArgumentException("Must receive a " + SchemaResult.class + " object.");
     }
 
-    SchemaResult genericSchemaResult = (SchemaResult) message;
+    SchemaResult genericSchemaResult = (SchemaResult)message;
 
     StringBuilder sb = new StringBuilder();
 
@@ -58,20 +55,16 @@ public class SchemaResultSerializer implements CustomMessageSerializer
     logger.debug("result id {}", genericSchemaResult.getId());
     logger.debug("result type {}", genericSchemaResult.getType());
 
-    sb.append("{\"").append(Result.FIELD_ID).
-    append("\":\"").append(genericSchemaResult.getId()).
-    append("\",\"").append(Result.FIELD_TYPE).
-    append("\":\"").append(genericSchemaResult.getType()).
-    append("\",\"").append(Result.FIELD_DATA).
-    append("\":");
+    sb.append("{\"").append(Result.FIELD_ID).append("\":\"").append(genericSchemaResult.getId())
+        .append("\",\"").append(Result.FIELD_TYPE).append("\":\"").append(genericSchemaResult.getType())
+        .append("\",\"").append(Result.FIELD_DATA).append("\":");
 
     JSONArray schemaArray = new JSONArray();
 
-    for(Schema schema: genericSchemaResult.getGenericSchemas()) {
+    for (Schema schema : genericSchemaResult.getGenericSchemas()) {
       try {
         schemaArray.put(new JSONObject(schema.getSchemaJSON()));
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         throw new RuntimeException(ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaUtils.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaUtils.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaUtils.java
index dcfbc8d..b628662 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaUtils.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SchemaUtils.java
@@ -69,10 +69,8 @@ public class SchemaUtils
       InputStream is = SchemaUtils.class.getClassLoader().getResourceAsStream(resource);
       Preconditions.checkArgument(is != null, resource + " could not be found in the resources.");
 
-      IOUtils.copy(is,
-                   stringWriter);
-    }
-    catch(IOException ex) {
+      IOUtils.copy(is, stringWriter);
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
     return stringWriter.toString();
@@ -89,8 +87,7 @@ public class SchemaUtils
 
     try {
       IOUtils.copy(inputStream, stringWriter);
-    }
-    catch(IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -103,8 +100,7 @@ public class SchemaUtils
    * @param fields The keys in the {@link JSONObject} to check.
    * @return True if the given {@link JSONObject} contains all the given keys. False otherwise.
    */
-  public static boolean checkValidKeys(JSONObject jo,
-                                       Fields fields)
+  public static boolean checkValidKeys(JSONObject jo, Fields fields)
   {
     @SuppressWarnings("unchecked")
     Set<String> fieldSet = fields.getFields();
@@ -119,8 +115,7 @@ public class SchemaUtils
    * @param jo The {@link JSONObject} to check.
    * @param fields The keys in the {@link JSONObject} to check.
    */
-  public static void checkValidKeysEx(JSONObject jo,
-                                      Fields fields)
+  public static void checkValidKeysEx(JSONObject jo, Fields fields)
   {
     @SuppressWarnings("unchecked")
     Set<String> fieldSet = fields.getFields();
@@ -128,10 +123,8 @@ public class SchemaUtils
 
     if (!jsonKeys.containsAll(fieldSet)) {
 
-      throw new IllegalArgumentException("The given set of keys "
-                                         + fieldSet
-                                         + " doesn't equal the set of keys in the json "
-                                         + jsonKeys);
+      throw new IllegalArgumentException("The given set of keys " + fieldSet
+          + " doesn't equal the set of keys in the json " + jsonKeys);
     }
   }
 
@@ -141,15 +134,12 @@ public class SchemaUtils
    * @param fieldsCollection The keys in the {@link JSONObject} to check.
    * @return True if the given {@link JSONObject} contains all the given keys. False otherwise.
    */
-  public static boolean checkValidKeys(JSONObject jo,
-                                       Collection<Fields> fieldsCollection)
+  public static boolean checkValidKeys(JSONObject jo, Collection<Fields> fieldsCollection)
   {
-    return checkValidKeysHelper(jo,
-                                fieldsCollection);
+    return checkValidKeysHelper(jo, fieldsCollection);
   }
 
-  private static boolean checkValidKeysHelper(JSONObject jo,
-                                              Collection<Fields> fieldsCollection)
+  private static boolean checkValidKeysHelper(JSONObject jo, Collection<Fields> fieldsCollection)
   {
     for (Fields fields: fieldsCollection) {
       LOG.debug("Checking keys: {}", fields);
@@ -158,17 +148,14 @@ public class SchemaUtils
       }
     }
 
-    LOG.error("The first level of keys in the provided JSON {} do not match any of the " +
-              "valid keysets {}",
-              getSetOfJSONKeys(jo),
-              fieldsCollection);
+    LOG.error("The first level of keys in the provided JSON {} do not match any of the " + "valid keysets {}",
+        getSetOfJSONKeys(jo), fieldsCollection);
     return false;
   }
 
   public static boolean checkValidKeys(JSONObject jo, List<Fields> fieldsCollection)
   {
-    return checkValidKeysHelper(jo,
-                                fieldsCollection);
+    return checkValidKeysHelper(jo, fieldsCollection);
   }
 
   /**
@@ -178,15 +165,12 @@ public class SchemaUtils
    * @param fieldsCollection The keys in the {@link JSONObject} to check.
    * @return True if the given {@link JSONObject} contains all the given keys. False otherwise.
    */
-  public static boolean checkValidKeysEx(JSONObject jo,
-                                         Collection<Fields> fieldsCollection)
+  public static boolean checkValidKeysEx(JSONObject jo, Collection<Fields> fieldsCollection)
   {
-    return checkValidKeysExHelper(jo,
-                                  fieldsCollection);
+    return checkValidKeysExHelper(jo, fieldsCollection);
   }
 
-  public static boolean checkValidKeysExHelper(JSONObject jo,
-                                               Collection<Fields> fieldsCollection)
+  public static boolean checkValidKeysExHelper(JSONObject jo, Collection<Fields> fieldsCollection)
   {
     for (Fields fields: fieldsCollection) {
       if (checkValidKeys(jo, fields)) {
@@ -196,10 +180,8 @@ public class SchemaUtils
 
     Set<String> keys = getSetOfJSONKeys(jo);
 
-    throw new IllegalArgumentException("The given json object has an invalid set of keys: " +
-                                       keys +
-                                       "\nOne of the following key combinations was expected:\n" +
-                                       fieldsCollection);
+    throw new IllegalArgumentException("The given json object has an invalid set of keys: " + keys
+        + "\nOne of the following key combinations was expected:\n" + fieldsCollection);
   }
 
   public static boolean checkValidKeysEx(JSONObject jo, List<Fields> fieldsCollection)
@@ -225,7 +207,7 @@ public class SchemaUtils
   {
     Map<String, String> fieldToTypeString = Maps.newHashMap();
 
-    for(Map.Entry<String, Type> entry: fieldToType.entrySet()) {
+    for (Map.Entry<String, Type> entry : fieldToType.entrySet()) {
       String field = entry.getKey();
       String typeString = entry.getValue().name();
 
@@ -237,12 +219,11 @@ public class SchemaUtils
 
   public static JSONArray findFirstKeyJSONArray(JSONObject jo, String key)
   {
-    if(jo.has(key)) {
+    if (jo.has(key)) {
       try {
         JSONArray jsonArray = jo.getJSONArray(key);
         return jsonArray;
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -250,22 +231,21 @@ public class SchemaUtils
     @SuppressWarnings("unchecked")
     Iterator<String> keyIterator = jo.keys();
 
-    while(keyIterator.hasNext()) {
+    while (keyIterator.hasNext()) {
       String childKey = keyIterator.next();
 
       JSONArray childJa = null;
 
       try {
         childJa = jo.getJSONArray(childKey);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJa != null) {
+      if (childJa != null) {
         JSONArray result = findFirstKeyJSONArray(childJa, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
 
@@ -276,15 +256,14 @@ public class SchemaUtils
 
       try {
         childJo = jo.getJSONObject(childKey);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJo != null) {
+      if (childJo != null) {
         JSONArray result = findFirstKeyJSONArray(childJo, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
       }
@@ -295,22 +274,19 @@ public class SchemaUtils
 
   public static JSONArray findFirstKeyJSONArray(JSONArray ja, String key)
   {
-    for(int index = 0;
-        index < ja.length();
-        index++) {
+    for (int index = 0; index < ja.length(); index++) {
       JSONArray childJa = null;
 
       try {
         childJa = ja.getJSONArray(index);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJa != null) {
+      if (childJa != null) {
         JSONArray result = findFirstKeyJSONArray(childJa, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
 
@@ -321,15 +297,14 @@ public class SchemaUtils
 
       try {
         childJo = ja.getJSONObject(index);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJo != null) {
+      if (childJo != null) {
         JSONArray result = findFirstKeyJSONArray(childJo, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
       }
@@ -340,12 +315,11 @@ public class SchemaUtils
 
   public static JSONObject findFirstKeyJSONObject(JSONObject jo, String key)
   {
-    if(jo.has(key)) {
+    if (jo.has(key)) {
       try {
         JSONObject jsonObject = jo.getJSONObject(key);
         return jsonObject;
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -353,22 +327,21 @@ public class SchemaUtils
     @SuppressWarnings("unchecked")
     Iterator<String> keyIterator = jo.keys();
 
-    while(keyIterator.hasNext()) {
+    while (keyIterator.hasNext()) {
       String childKey = keyIterator.next();
 
       JSONArray childJa = null;
 
       try {
         childJa = jo.getJSONArray(childKey);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJa != null) {
+      if (childJa != null) {
         JSONObject result = findFirstKeyJSONObject(childJa, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
 
@@ -379,15 +352,14 @@ public class SchemaUtils
 
       try {
         childJo = jo.getJSONObject(childKey);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJo != null) {
+      if (childJo != null) {
         JSONObject result = findFirstKeyJSONObject(childJo, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
       }
@@ -398,22 +370,19 @@ public class SchemaUtils
 
   public static JSONObject findFirstKeyJSONObject(JSONArray ja, String key)
   {
-    for(int index = 0;
-        index < ja.length();
-        index++) {
+    for (int index = 0; index < ja.length(); index++) {
       JSONArray childJa = null;
 
       try {
         childJa = ja.getJSONArray(index);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJa != null) {
+      if (childJa != null) {
         JSONObject result = findFirstKeyJSONObject(childJa, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
 
@@ -424,15 +393,14 @@ public class SchemaUtils
 
       try {
         childJo = ja.getJSONObject(index);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         //Do nothing
       }
 
-      if(childJo != null) {
+      if (childJo != null) {
         JSONObject result = findFirstKeyJSONObject(childJo, key);
 
-        if(result != null) {
+        if (result != null) {
           return result;
         }
       }
@@ -452,14 +420,13 @@ public class SchemaUtils
     @SuppressWarnings("unchecked")
     Iterator<String> keyIterator = jo.keys();
 
-    while(keyIterator.hasNext()) {
+    while (keyIterator.hasNext()) {
       String key = keyIterator.next();
       String value;
 
       try {
         value = jo.getString(key);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         throw new RuntimeException(ex);
       }
 
@@ -478,14 +445,13 @@ public class SchemaUtils
   {
     JSONObject jo = new JSONObject();
 
-    for(Map.Entry<String, String> entry: map.entrySet()) {
+    for (Map.Entry<String, String> entry : map.entrySet()) {
       String key = entry.getKey();
       String value = entry.getValue();
 
       try {
         jo.put(key, value);
-      }
-      catch(JSONException ex) {
+      } catch (JSONException ex) {
         throw new RuntimeException(ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
index aed9013..5010580 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
@@ -22,13 +22,13 @@ import java.util.Collections;
 import java.util.Map;
 import java.util.Set;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+
 import com.datatorrent.netlet.util.DTThrowable;
 
 /**
@@ -118,8 +118,7 @@ public class SnapshotSchema implements Schema
    * @param schemaJSON The JSON defining this schema.
    * @param schemaKeys The schema keys tied to this schema.
    */
-  public SnapshotSchema(String schemaJSON,
-                       Map<String, String> schemaKeys)
+  public SnapshotSchema(String schemaJSON, Map<String, String> schemaKeys)
   {
     this(schemaJSON);
 
@@ -133,12 +132,9 @@ public class SnapshotSchema implements Schema
    * @param schemaJSON The schemaJSON this schema is built from.
    * @param schemaKeys The schemaKeys associated with this schema.
    */
-  public SnapshotSchema(int schemaID,
-                       String schemaJSON,
-                       Map<String, String> schemaKeys)
+  public SnapshotSchema(int schemaID, String schemaJSON, Map<String, String> schemaKeys)
   {
-    this(schemaJSON,
-         schemaKeys);
+    this(schemaJSON, schemaKeys);
 
     this.schemaID = schemaID;
   }
@@ -153,8 +149,7 @@ public class SnapshotSchema implements Schema
 
     try {
       initialize();
-    }
-    catch(Exception ex) {
+    } catch (Exception ex) {
       DTThrowable.rethrow(ex);
     }
   }
@@ -165,8 +160,7 @@ public class SnapshotSchema implements Schema
    * @param schemaID The schemaID associated with this schema.
    * @param schemaJSON The JSON that this schema is constructed from.
    */
-  public SnapshotSchema(int schemaID,
-                       String schemaJSON)
+  public SnapshotSchema(int schemaID, String schemaJSON)
   {
     this(schemaJSON);
     this.schemaID = schemaID;
@@ -177,12 +171,12 @@ public class SnapshotSchema implements Schema
   {
     changed = true;
 
-    if(schemaKeys == null) {
+    if (schemaKeys == null) {
       this.schemaKeys = null;
       return;
     }
 
-    for(Map.Entry<String, String> entry: schemaKeys.entrySet()) {
+    for (Map.Entry<String, String> entry : schemaKeys.entrySet()) {
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
     }
@@ -199,9 +193,8 @@ public class SnapshotSchema implements Schema
   {
     schema = new JSONObject(schemaJSON);
 
-    if(schemaKeys != null) {
-      schema.put(Schema.FIELD_SCHEMA_KEYS,
-                 SchemaUtils.createJSONObject(schemaKeys));
+    if (schemaKeys != null) {
+      schema.put(Schema.FIELD_SCHEMA_KEYS, SchemaUtils.createJSONObject(schemaKeys));
     }
 
     valueToType = Maps.newHashMap();
@@ -209,12 +202,9 @@ public class SnapshotSchema implements Schema
     JSONArray values = schema.getJSONArray(FIELD_VALUES);
 
     Preconditions.checkState(values.length() > 0,
-                             "The schema does not specify any values.");
+        "The schema does not specify any values.");
 
-    for(int index = 0;
-        index < values.length();
-        index++)
-    {
+    for (int index = 0; index < values.length(); index++) {
       JSONObject value = values.getJSONObject(index);
       String name = value.getString(FIELD_VALUES_NAME);
       String typeName = value.getString(FIELD_VALUES_TYPE);
@@ -222,9 +212,7 @@ public class SnapshotSchema implements Schema
       Type type = Type.NAME_TO_TYPE.get(typeName);
       valueToType.put(name, type);
 
-      Preconditions.checkArgument(type != null,
-                                  typeName
-                                  + " is not a valid type.");
+      Preconditions.checkArgument(type != null, typeName + " is not a valid type.");
     }
 
     valueToType = Collections.unmodifiableMap(valueToType);
@@ -233,8 +221,7 @@ public class SnapshotSchema implements Schema
     try {
       schema.put(FIELD_SCHEMA_TYPE, SCHEMA_TYPE);
       schema.put(FIELD_SCHEMA_VERSION, SCHEMA_VERSION);
-    }
-    catch(JSONException e) {
+    } catch (JSONException e) {
       throw new RuntimeException(e);
     }
 
@@ -243,8 +230,9 @@ public class SnapshotSchema implements Schema
 
   public void setTags(Set<String> tags)
   {
-    if (tags == null || tags.isEmpty())
+    if (tags == null || tags.isEmpty()) {
       throw new IllegalArgumentException("tags can't be null or empty.");
+    }
 
     try {
       JSONArray tagArray = new JSONArray(tags);
@@ -270,19 +258,16 @@ public class SnapshotSchema implements Schema
   @Override
   public String getSchemaJSON()
   {
-    if(!changed && schemaJSON != null) {
+    if (!changed && schemaJSON != null) {
       return schemaJSON;
     }
 
-    if(schemaKeys == null) {
+    if (schemaKeys == null) {
       schema.remove(Schema.FIELD_SCHEMA_KEYS);
-    }
-    else {
+    } else {
       try {
-        schema.put(Schema.FIELD_SCHEMA_KEYS,
-                        SchemaUtils.createJSONObject(schemaKeys));
-      }
-      catch(JSONException ex) {
+        schema.put(Schema.FIELD_SCHEMA_KEYS, SchemaUtils.createJSONObject(schemaKeys));
+      } catch (JSONException ex) {
         throw new RuntimeException(ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/TimeBucket.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/TimeBucket.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/TimeBucket.java
index f1f24bc..5570ca0 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/TimeBucket.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/TimeBucket.java
@@ -91,13 +91,11 @@ public enum TimeBucket
    */
   public static final Map<TimeUnit, TimeBucket> TIME_UNIT_TO_TIME_BUCKET;
 
-  static
-  {
+  static {
     Map<String, TimeBucket> bucketToType = Maps.newHashMap();
     Map<TimeUnit, TimeBucket> timeUnitToTimeBucket = Maps.newHashMap();
 
-    for(TimeBucket timeBucket: TimeBucket.values())
-    {
+    for (TimeBucket timeBucket : TimeBucket.values()) {
       timeUnitToTimeBucket.put(timeBucket.getTimeUnit(), timeBucket);
       bucketToType.put(timeBucket.getText(), timeBucket);
     }
@@ -188,7 +186,7 @@ public enum TimeBucket
    */
   public long roundDown(long timestamp)
   {
-    if(timeUnit == null) {
+    if (timeUnit == null) {
       return 0;
     }
 
@@ -218,8 +216,7 @@ public enum TimeBucket
   public static TimeBucket getBucketEx(String name)
   {
     TimeBucket bucket = getBucket(name);
-    Preconditions.checkArgument(bucket != null,
-                                name + " is not a valid bucket type.");
+    Preconditions.checkArgument(bucket != null, name + " is not a valid bucket type.");
     return bucket;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/Type.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Type.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Type.java
index 1748e17..5e79466 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Type.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Type.java
@@ -106,7 +106,7 @@ public enum Type implements Serializable
   static {
     Map<String, Type> nameToType = Maps.newHashMap();
 
-    for(Type type: Type.values()) {
+    for (Type type : Type.values()) {
       nameToType.put(type.getName(), type);
     }
 
@@ -114,7 +114,7 @@ public enum Type implements Serializable
 
     Map<Class<?>, Type> clazzToType = Maps.newHashMap();
 
-    for(Type type: Type.values()) {
+    for (Type type : Type.values()) {
       clazzToType.put(type.getClazz(), type);
     }
 
@@ -153,11 +153,7 @@ public enum Type implements Serializable
    * @param clazz The Class of the corresponding Java type.
    * @param higherTypes The set of types to which this type can be promoted.
    */
-  Type(String name,
-       int byteSize,
-       JSONType jsonType,
-       Class<?> clazz,
-       Set<Type> higherTypes)
+  Type(String name, int byteSize, JSONType jsonType, Class<?> clazz, Set<Type> higherTypes)
   {
     this.name = name;
     this.byteSize = byteSize;
@@ -247,8 +243,7 @@ public enum Type implements Serializable
   {
     Type type = getType(name);
 
-    Preconditions.checkArgument(type != null,
-                                name + " is not a valid type.");
+    Preconditions.checkArgument(type != null, name + " is not a valid type.");
 
     return type;
   }
@@ -262,50 +257,41 @@ public enum Type implements Serializable
    */
   public static Object promote(Type from, Type to, Object o)
   {
-    if(from == to) {
+    if (from == to) {
       return o;
     }
 
-    Preconditions.checkArgument(!(from == Type.BOOLEAN
-                                  || from == Type.CHAR
-                                  || from == Type.LONG
-                                  || from == Type.DOUBLE),
-                                "Cannot convert "
-                                + Type.BOOLEAN.getName() + ", "
-                                + Type.CHAR.getName() + ", "
-                                + Type.LONG.getName() + ", or "
-                                + Type.DOUBLE + " to a larger type.");
+    Preconditions.checkArgument(!(from == Type.BOOLEAN || from == Type.CHAR || from == Type.LONG
+        || from == Type.DOUBLE), "Cannot convert " + Type.BOOLEAN.getName() + ", " + Type.CHAR.getName()
+        + ", " + Type.LONG.getName() + ", or " + Type.DOUBLE + " to a larger type.");
 
     Preconditions.checkArgument(from.getHigherTypes().contains(to),
-                                from.getName() + " cannot be promoted to " + to.getName());
+        from.getName() + " cannot be promoted to " + to.getName());
 
-    if(from == Type.FLOAT && to == Type.DOUBLE) {
+    if (from == Type.FLOAT && to == Type.DOUBLE) {
       return (Double)((Float)o).doubleValue();
     }
 
-    if(from == Type.BYTE) {
-      if(to == Type.SHORT) {
+    if (from == Type.BYTE) {
+      if (to == Type.SHORT) {
         return (Short)((Byte)o).shortValue();
-      }
-      else if(to == Type.INTEGER) {
+      } else if (to == Type.INTEGER) {
         return (Integer)((Byte)o).intValue();
-      }
-      else if(to == Type.LONG) {
+      } else if (to == Type.LONG) {
         return (Long)((Byte)o).longValue();
       }
     }
 
-    if(from == Type.SHORT) {
-      if(to == Type.INTEGER) {
+    if (from == Type.SHORT) {
+      if (to == Type.INTEGER) {
         return (Integer)((Short)o).intValue();
-      }
-      else if(to == Type.LONG) {
+      } else if (to == Type.LONG) {
         return (Long)((Short)o).longValue();
       }
     }
 
-    if(from == Type.INTEGER
-       && to == Type.LONG) {
+    if (from == Type.INTEGER
+        && to == Type.LONG) {
       return (Long)((Integer)o).longValue();
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
index 236735f..0b03e79 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
@@ -111,8 +111,8 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
   private Set<String> tags;
   
   @AppData.QueryPort
-  @InputPortFieldAnnotation(optional=true)
-  public transient final DefaultInputPort<String> query = new DefaultInputPort<String>()
+  @InputPortFieldAnnotation(optional = true)
+  public final transient DefaultInputPort<String> query = new DefaultInputPort<String>()
   {
     @Override
     public void process(String queryJSON)
@@ -151,7 +151,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
     }
   }
 
-  public transient final DefaultInputPort<List<INPUT_EVENT>> input = new DefaultInputPort<List<INPUT_EVENT>>()
+  public final transient DefaultInputPort<List<INPUT_EVENT>> input = new DefaultInputPort<List<INPUT_EVENT>>()
   {
     @Override
     public void process(List<INPUT_EVENT> rows)
@@ -187,7 +187,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
 
 
   @Override
-  final public void activate(OperatorContext ctx)
+  public final void activate(OperatorContext ctx)
   {
     if (embeddableQueryInfoProvider != null) {
       embeddableQueryInfoProvider.activate(ctx);
@@ -213,8 +213,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
     if (embeddableQueryInfoProvider != null) {
       embeddableQueryInfoProvider.enableEmbeddedMode();
       LOG.info("An embeddable query operator is being used of class {}.", embeddableQueryInfoProvider.getClass().getName());
-      StoreUtils.attachOutputPortToInputPort(embeddableQueryInfoProvider.getOutputPort(),
-                                             query);
+      StoreUtils.attachOutputPortToInputPort(embeddableQueryInfoProvider.getOutputPort(), query);
       embeddableQueryInfoProvider.setup(context);
     }
   }
@@ -222,8 +221,9 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
   protected void setupSchema()
   {
     schema = new SnapshotSchema(snapshotSchemaJSON);
-    if (tags != null && !tags.isEmpty())
+    if (tags != null && !tags.isEmpty()) {
       schema.setTags(tags);
+    }
   }
 
   protected void setupQueryProcessor()
@@ -252,7 +252,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
     {
       Result result;
 
-      while((result = queryProcessor.process()) != null) {
+      while ((result = queryProcessor.process()) != null) {
         String resultJSON = resultSerializerFactory.serialize(result);
         LOG.debug("emitting {}", resultJSON);
         queryResult.emit(resultJSON);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMap.java b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMap.java
index 8134ff9..0fc4200 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMap.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMap.java
@@ -23,13 +23,14 @@ import java.util.Map;
 
 import javax.validation.constraints.NotNull;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 
 import com.datatorrent.lib.appdata.gpo.GPOMutable;
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * This operator accepts a list of Map&lt;String,Object&gt; objects, and serves the data under the {@link SnapshotSchema}.
@@ -59,9 +60,7 @@ public class AppDataSnapshotServerMap extends AbstractAppDataSnapshotServer<Map<
 
     List<String> fields = fd.getFieldList();
 
-    for(int index = 0;
-        index < fields.size();
-        index++) {
+    for (int index = 0; index < fields.size(); index++) {
       String field = fields.get(index);
       values.setFieldGeneric(field, inputEvent.get(getMapField(field)));
     }
@@ -77,13 +76,13 @@ public class AppDataSnapshotServerMap extends AbstractAppDataSnapshotServer<Map<
    */
   private String getMapField(String field)
   {
-    if(tableFieldToMapField == null) {
+    if (tableFieldToMapField == null) {
       return field;
     }
 
     String mapField = tableFieldToMapField.get(field);
 
-    if(mapField == null) {
+    if (mapField == null) {
       return field;
     }
 
@@ -111,7 +110,7 @@ public class AppDataSnapshotServerMap extends AbstractAppDataSnapshotServer<Map<
   {
     Preconditions.checkNotNull(tableFieldToMapField);
 
-    for(Map.Entry<String, String> entry: tableFieldToMapField.entrySet()) {
+    for (Map.Entry<String, String> entry : tableFieldToMapField.entrySet()) {
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPOJO.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPOJO.java b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPOJO.java
index 68d9017..ab3c760 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPOJO.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPOJO.java
@@ -77,7 +77,7 @@ public class AppDataSnapshotServerPOJO extends AbstractAppDataSnapshotServer<Obj
    */
   private void firstTuple(Object inputEvent)
   {
-    if(firstTupleProcessed) {
+    if (firstTupleProcessed) {
       return;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/codec/JavaSerializationStreamCodec.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/codec/JavaSerializationStreamCodec.java b/library/src/main/java/com/datatorrent/lib/codec/JavaSerializationStreamCodec.java
index b82fc14..1096a72 100644
--- a/library/src/main/java/com/datatorrent/lib/codec/JavaSerializationStreamCodec.java
+++ b/library/src/main/java/com/datatorrent/lib/codec/JavaSerializationStreamCodec.java
@@ -41,37 +41,37 @@ import com.datatorrent.netlet.util.Slice;
  */
 public class JavaSerializationStreamCodec<T extends Serializable> implements StreamCodec<T>, Serializable
 {
-	@Override
-	public Object fromByteArray(Slice fragment)
-	{
-		ByteArrayInputStream bis = new ByteArrayInputStream(fragment.buffer,
-				fragment.offset, fragment.length);
-		try {
-			ObjectInputStream ois = new ObjectInputStream(bis);
-			return ois.readObject();
-		} catch (Exception ioe) {
-			throw new RuntimeException(ioe);
-		}
-	}
+  @Override
+  public Object fromByteArray(Slice fragment)
+  {
+    ByteArrayInputStream bis = new ByteArrayInputStream(fragment.buffer,
+        fragment.offset, fragment.length);
+    try {
+      ObjectInputStream ois = new ObjectInputStream(bis);
+      return ois.readObject();
+    } catch (Exception ioe) {
+      throw new RuntimeException(ioe);
+    }
+  }
 
-	@Override
-	public Slice toByteArray(T object)
-	{
-		ByteArrayOutputStream bos = new ByteArrayOutputStream();
-		try {
-			ObjectOutputStream oos = new ObjectOutputStream(bos);
-			oos.writeObject(object);
-			oos.flush();
-			byte[] buffer = bos.toByteArray();
-			return new Slice(buffer, 0, buffer.length);
-		} catch (IOException ex) {
-			throw new RuntimeException(ex);
-		}
-	}
+  @Override
+  public Slice toByteArray(T object)
+  {
+    ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    try {
+      ObjectOutputStream oos = new ObjectOutputStream(bos);
+      oos.writeObject(object);
+      oos.flush();
+      byte[] buffer = bos.toByteArray();
+      return new Slice(buffer, 0, buffer.length);
+    } catch (IOException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
 
-	@Override
-	public int getPartition(T o)
-	{
-		return o.hashCode();
-	}
+  @Override
+  public int getPartition(T o)
+  {
+    return o.hashCode();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/converter/ByteArrayToStringConverterOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/converter/ByteArrayToStringConverterOperator.java b/library/src/main/java/com/datatorrent/lib/converter/ByteArrayToStringConverterOperator.java
index 7342cfd..33e5bf6 100644
--- a/library/src/main/java/com/datatorrent/lib/converter/ByteArrayToStringConverterOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/converter/ByteArrayToStringConverterOperator.java
@@ -18,10 +18,11 @@
  */
 package com.datatorrent.lib.converter;
 
-import com.datatorrent.common.util.BaseOperator;
+import java.nio.charset.Charset;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import java.nio.charset.Charset;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator converts Byte Array to String. User gets the option of providing character Encoding.
@@ -55,7 +56,7 @@ public class ByteArrayToStringConverterOperator extends BaseOperator
     @Override
     public void process(byte[] message)
     {
-      output.emit(characterEncoding == null? new String(message): new String(message, characterEncoding));
+      output.emit(characterEncoding == null ? new String(message) : new String(message, characterEncoding));
     }
 
   };

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/converter/Converter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/converter/Converter.java b/library/src/main/java/com/datatorrent/lib/converter/Converter.java
index f1d4325..ef999e4 100644
--- a/library/src/main/java/com/datatorrent/lib/converter/Converter.java
+++ b/library/src/main/java/com/datatorrent/lib/converter/Converter.java
@@ -36,8 +36,7 @@ public interface Converter<INPUT, OUTPUT>
    * Provide the implementation for converting tuples from one format to the
    * other
    * 
-   * @param INPUT
-   *          tuple of certain format
+   * @param tuple tuple of certain format
    * @return OUTPUT tuple of converted format
    */
   public OUTPUT convert(INPUT tuple);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/converter/MapToKeyHashValuePairConverter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/converter/MapToKeyHashValuePairConverter.java b/library/src/main/java/com/datatorrent/lib/converter/MapToKeyHashValuePairConverter.java
index bb09cd8..52d3273 100644
--- a/library/src/main/java/com/datatorrent/lib/converter/MapToKeyHashValuePairConverter.java
+++ b/library/src/main/java/com/datatorrent/lib/converter/MapToKeyHashValuePairConverter.java
@@ -21,9 +21,9 @@ package com.datatorrent.lib.converter;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.util.KeyHashValPair;
 
 /**
@@ -36,7 +36,8 @@ import com.datatorrent.lib.util.KeyHashValPair;
  *
  * @since 3.0.0
  */
-public class MapToKeyHashValuePairConverter<K, V> extends BaseOperator {
+public class MapToKeyHashValuePairConverter<K, V> extends BaseOperator
+{
 
   /**
    * Input port which accepts Map<K, V>.
@@ -46,8 +47,7 @@ public class MapToKeyHashValuePairConverter<K, V> extends BaseOperator {
     @Override
     public void process(Map<K, V> tuple)
     {
-      for(Entry<K, V> entry:tuple.entrySet())
-      {
+      for (Entry<K, V> entry : tuple.entrySet()) {
         output.emit(new KeyHashValPair<K, V>(entry.getKey(), entry.getValue()));
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/converter/MapToKeyValuePairConverter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/converter/MapToKeyValuePairConverter.java b/library/src/main/java/com/datatorrent/lib/converter/MapToKeyValuePairConverter.java
index f7f63ed..efab4a5 100644
--- a/library/src/main/java/com/datatorrent/lib/converter/MapToKeyValuePairConverter.java
+++ b/library/src/main/java/com/datatorrent/lib/converter/MapToKeyValuePairConverter.java
@@ -21,11 +21,10 @@ package com.datatorrent.lib.converter;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.datatorrent.lib.util.KeyValPair;
-
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  *
@@ -37,7 +36,8 @@ import com.datatorrent.api.DefaultOutputPort;
  *
  * @since 3.0.0
  */
-public class MapToKeyValuePairConverter<K, V> extends BaseOperator {
+public class MapToKeyValuePairConverter<K, V> extends BaseOperator
+{
 
   /**
    * Input port which accepts Map<K, V>.
@@ -47,8 +47,7 @@ public class MapToKeyValuePairConverter<K, V> extends BaseOperator {
     @Override
     public void process(Map<K, V> tuple)
     {
-      for(Entry<K, V> entry:tuple.entrySet())
-      {
+      for (Entry<K, V> entry : tuple.entrySet()) {
         output.emit(new KeyValPair<K, V>(entry.getKey(), entry.getValue()));
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMap.java b/library/src/main/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMap.java
index ad93868..e39259e 100644
--- a/library/src/main/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMap.java
+++ b/library/src/main/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMap.java
@@ -22,9 +22,9 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  *
@@ -36,7 +36,8 @@ import com.datatorrent.api.DefaultOutputPort;
  *
  * @since 3.0.0
  */
-public class StringValueToNumberConverterForMap<K> extends BaseOperator {
+public class StringValueToNumberConverterForMap<K> extends BaseOperator
+{
 
   /**
    * Input port which accepts Map<K, Numeric String>.
@@ -47,8 +48,7 @@ public class StringValueToNumberConverterForMap<K> extends BaseOperator {
     public void process(Map<K, String> tuple)
     {
       Map<K, Number> outputMap = new HashMap<K, Number>();
-      for(Entry<K, String> entry:tuple.entrySet())
-      {
+      for (Entry<K, String> entry : tuple.entrySet()) {
         String val = entry.getValue();
         if (val == null) {
           return;
@@ -57,12 +57,10 @@ public class StringValueToNumberConverterForMap<K> extends BaseOperator {
         boolean errortuple = false;
         try {
           tvalue = Double.parseDouble(val);
-        }
-        catch (NumberFormatException e) {
+        } catch (NumberFormatException e) {
           errortuple = true;
         }
-        if(!errortuple)
-        {
+        if (!errortuple) {
           outputMap.put(entry.getKey(), tvalue);
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/counters/BasicCounters.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/counters/BasicCounters.java b/library/src/main/java/com/datatorrent/lib/counters/BasicCounters.java
index 60e2d8e..255ec03 100644
--- a/library/src/main/java/com/datatorrent/lib/counters/BasicCounters.java
+++ b/library/src/main/java/com/datatorrent/lib/counters/BasicCounters.java
@@ -25,9 +25,6 @@ import java.util.Map;
 
 import javax.annotation.Nonnull;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-
 import org.codehaus.jackson.JsonGenerator;
 import org.codehaus.jackson.map.JsonSerializer;
 import org.codehaus.jackson.map.SerializerProvider;
@@ -35,8 +32,10 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 
 import org.apache.commons.lang.mutable.Mutable;
 
-import com.datatorrent.api.Context;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 
+import com.datatorrent.api.Context;
 import com.datatorrent.common.util.NumberAggregate;
 
 /**
@@ -142,7 +141,7 @@ public class BasicCounters<T extends Number & Mutable> implements Serializable
       for (Object counter : objects) {
         if (counter instanceof BasicCounters) {
           @SuppressWarnings("unchecked")
-          BasicCounters<T> physical = (BasicCounters<T>) counter;
+          BasicCounters<T> physical = (BasicCounters<T>)counter;
           ImmutableMap<Enum<?>, T> copy = physical.getCopy();
 
           for (Map.Entry<Enum<?>, T> entry : copy.entrySet()) {
@@ -178,7 +177,7 @@ public class BasicCounters<T extends Number & Mutable> implements Serializable
       for (Object counter : objects) {
         if (counter instanceof BasicCounters) {
           @SuppressWarnings("unchecked")
-          BasicCounters<T> physical = (BasicCounters<T>) counter;
+          BasicCounters<T> physical = (BasicCounters<T>)counter;
           ImmutableMap<Enum<?>, T> copy = physical.getCopy();
 
           for (Map.Entry<Enum<?>, T> entry : copy.entrySet()) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractAggregateTransactionableKeyValueStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractAggregateTransactionableKeyValueStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractAggregateTransactionableKeyValueStoreOutputOperator.java
index ba89f7b..5225329 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractAggregateTransactionableKeyValueStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractAggregateTransactionableKeyValueStoreOutputOperator.java
@@ -34,6 +34,6 @@ package com.datatorrent.lib.db;
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 public abstract class AbstractAggregateTransactionableKeyValueStoreOutputOperator<T, S extends TransactionableKeyValueStore>
-        extends AbstractAggregateTransactionableStoreOutputOperator<T, S>
+    extends AbstractAggregateTransactionableStoreOutputOperator<T, S>
 {
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractBatchTransactionableStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractBatchTransactionableStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractBatchTransactionableStoreOutputOperator.java
index 771e679..fbb924a 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractBatchTransactionableStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractBatchTransactionableStoreOutputOperator.java
@@ -35,10 +35,13 @@ import com.google.common.collect.Lists;
  * @since 1.0.2
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving
-public abstract class AbstractBatchTransactionableStoreOutputOperator<T, S extends TransactionableStore> extends AbstractAggregateTransactionableStoreOutputOperator<T, S> {
+public abstract class AbstractBatchTransactionableStoreOutputOperator<T, S extends TransactionableStore>
+    extends AbstractAggregateTransactionableStoreOutputOperator<T, S>
+{
 
   private Collection<T> tuples;
-  public AbstractBatchTransactionableStoreOutputOperator(){
+  public AbstractBatchTransactionableStoreOutputOperator()
+  {
     tuples = Lists.newArrayList();
   }
 
@@ -62,7 +65,8 @@ public abstract class AbstractBatchTransactionableStoreOutputOperator<T, S exten
   public abstract void processBatch(Collection<T> tuples);
 
   @Override
-  public void storeAggregate() {
+  public void storeAggregate()
+  {
     processBatch(tuples);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractKeyValueStoreInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractKeyValueStoreInputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractKeyValueStoreInputOperator.java
index 9b6ddc2..f3f8c63 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractKeyValueStoreInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractKeyValueStoreInputOperator.java
@@ -18,7 +18,11 @@
  */
 package com.datatorrent.lib.db;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 /**
  * This is the base implementation of an input operator which consumes data from a key value store.&nbsp;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableKeyValueStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableKeyValueStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableKeyValueStoreOutputOperator.java
index f199986..fe828dc 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableKeyValueStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableKeyValueStoreOutputOperator.java
@@ -36,6 +36,6 @@ package com.datatorrent.lib.db;
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 public abstract class AbstractPassThruTransactionableKeyValueStoreOutputOperator<T, S extends TransactionableKeyValueStore>
-        extends AbstractPassThruTransactionableStoreOutputOperator<T, S>
+    extends AbstractPassThruTransactionableStoreOutputOperator<T, S>
 {
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableStoreOutputOperator.java
index 55c8617..b471a63 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractPassThruTransactionableStoreOutputOperator.java
@@ -35,7 +35,8 @@ package com.datatorrent.lib.db;
  * @since 0.9.3
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving
-public abstract class AbstractPassThruTransactionableStoreOutputOperator<T, S extends TransactionableStore> extends AbstractTransactionableStoreOutputOperator<T, S>
+public abstract class AbstractPassThruTransactionableStoreOutputOperator<T, S extends TransactionableStore>
+    extends AbstractTransactionableStoreOutputOperator<T, S>
 {
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractStoreInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractStoreInputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractStoreInputOperator.java
index a6eb780..18e23f4 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractStoreInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractStoreInputOperator.java
@@ -41,7 +41,7 @@ public abstract class AbstractStoreInputOperator<T, S extends Connectable> imple
   /**
    * The output port on which tuples read form a store are emitted.
    */
-  final public transient DefaultOutputPort<T> outputPort = new DefaultOutputPort<T>();
+  public final transient DefaultOutputPort<T> outputPort = new DefaultOutputPort<T>();
   protected S store;
   /**
    * Gets the store.
@@ -79,8 +79,7 @@ public abstract class AbstractStoreInputOperator<T, S extends Connectable> imple
   {
     try {
       store.connect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -90,8 +89,7 @@ public abstract class AbstractStoreInputOperator<T, S extends Connectable> imple
   {
     try {
       store.disconnect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       // ignore
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractStoreOutputOperator.java
index aff5b3e..2ef8d30 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractStoreOutputOperator.java
@@ -20,10 +20,10 @@ package com.datatorrent.lib.db;
 
 import java.io.IOException;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This is the base implementation of an output operator,
@@ -80,8 +80,7 @@ public abstract class AbstractStoreOutputOperator<T, S extends Connectable> exte
   {
     try {
       store.connect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -96,8 +95,7 @@ public abstract class AbstractStoreOutputOperator<T, S extends Connectable> exte
   {
     try {
       store.disconnect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/AbstractTransactionableStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/AbstractTransactionableStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/AbstractTransactionableStoreOutputOperator.java
index 61682c2..037a8b2 100644
--- a/library/src/main/java/com/datatorrent/lib/db/AbstractTransactionableStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/AbstractTransactionableStoreOutputOperator.java
@@ -20,11 +20,11 @@ package com.datatorrent.lib.db;
 
 import java.io.IOException;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This is the base implementation of an output operator,
@@ -95,8 +95,7 @@ public abstract class AbstractTransactionableStoreOutputOperator<T, S extends Tr
       appId = context.getValue(DAG.APPLICATION_ID);
       operatorId = context.getId();
       committedWindowId = store.getCommittedWindowId(appId, operatorId);
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -115,8 +114,7 @@ public abstract class AbstractTransactionableStoreOutputOperator<T, S extends Tr
         store.rollbackTransaction();
       }
       store.disconnect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcNonTransactionableBatchOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcNonTransactionableBatchOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcNonTransactionableBatchOutputOperator.java
index 03f7719..7dee870 100644
--- a/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcNonTransactionableBatchOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcNonTransactionableBatchOutputOperator.java
@@ -18,15 +18,19 @@
  */
 package com.datatorrent.lib.db.jdbc;
 
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.DAG;
-import com.google.common.collect.Lists;
 import java.sql.SQLException;
 import java.util.List;
+
 import javax.validation.constraints.Min;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DAG;
+
 /**
  * A generic output operator which updates the database without using transactions
  * and batches writes to increase performance. This operator satisfies the exactly once constraint
@@ -38,7 +42,7 @@ import org.slf4j.LoggerFactory;
  */
 public abstract class AbstractJdbcNonTransactionableBatchOutputOperator<T, S extends JdbcNonTransactionalStore> extends AbstractJdbcNonTransactionableOutputOperator<T, S>
 {
-  private static transient final Logger LOG = LoggerFactory.getLogger(AbstractJdbcNonTransactionableBatchOutputOperator.class);
+  private static final transient Logger LOG = LoggerFactory.getLogger(AbstractJdbcNonTransactionableBatchOutputOperator.class);
   public static final int DEFAULT_BATCH_SIZE = 1000;
 
   @Min(1)
@@ -93,7 +97,7 @@ public abstract class AbstractJdbcNonTransactionableBatchOutputOperator<T, S ext
 
     mode = context.getValue(OperatorContext.PROCESSING_MODE);
 
-    if(mode==ProcessingMode.AT_MOST_ONCE){
+    if (mode == ProcessingMode.AT_MOST_ONCE) {
       //Batch must be cleared to avoid writing same data twice
       tuples.clear();
     }
@@ -129,7 +133,7 @@ public abstract class AbstractJdbcNonTransactionableBatchOutputOperator<T, S ext
     super.endWindow();
 
     //This window is done so write it to the database.
-    if(committedWindowId < currentWindowId) {
+    if (committedWindowId < currentWindowId) {
       store.storeCommittedWindowId(appId, operatorId, currentWindowId);
       committedWindowId = currentWindowId;
     }
@@ -139,7 +143,7 @@ public abstract class AbstractJdbcNonTransactionableBatchOutputOperator<T, S ext
   public void processTuple(T tuple)
   {
     //Minimize duplicated data in the atleast once case
-    if(committedWindowId >= currentWindowId) {
+    if (committedWindowId >= currentWindowId) {
       return;
     }
 
@@ -149,7 +153,7 @@ public abstract class AbstractJdbcNonTransactionableBatchOutputOperator<T, S ext
       setStatementParameters(updateCommand, tuple);
       updateCommand.addBatch();
 
-      if(tuples.size() >= batchSize) {
+      if (tuples.size() >= batchSize) {
         tuples.clear();
         updateCommand.executeBatch();
         updateCommand.clearBatch();



[14/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregation.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregation.java b/library/src/main/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregation.java
index 7c9c350..3cace73 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregation.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregation.java
@@ -27,9 +27,10 @@ import java.util.regex.Pattern;
 
 import javax.validation.constraints.NotNull;
 
-import org.apache.commons.lang.mutable.MutableDouble;
-import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.lang.mutable.MutableDouble;
 
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
@@ -52,9 +53,10 @@ public class MultiWindowDimensionAggregation implements Operator
   @SuppressWarnings("unused")
   private static final Logger logger = LoggerFactory.getLogger(MultiWindowDimensionAggregation.class);
 
-  public enum AggregateOperation {
+  public enum AggregateOperation
+  {
     SUM, AVERAGE
-  };
+  }
 
   private int windowSize = 2;
   private int currentWindow = 0;
@@ -76,7 +78,8 @@ public class MultiWindowDimensionAggregation implements Operator
   /**
    * This is the input port which receives multi dimensional data.
    */
-  public final transient DefaultInputPort<Map<String, Map<String, Number>>> data = new DefaultInputPort<Map<String, Map<String, Number>>>() {
+  public final transient DefaultInputPort<Map<String, Map<String, Number>>> data = new DefaultInputPort<Map<String, Map<String, Number>>>()
+  {
     @Override
     public void process(Map<String, Map<String, Number>> tuple)
     {
@@ -169,12 +172,15 @@ public class MultiWindowDimensionAggregation implements Operator
   @Override
   public void setup(OperatorContext arg0)
   {
-    if (arg0 != null)
+    if (arg0 != null) {
       applicationWindowSize = arg0.getValue(OperatorContext.APPLICATION_WINDOW_COUNT);
-    if (cacheOject == null)
+    }
+    if (cacheOject == null) {
       cacheOject = new HashMap<Integer, Map<String, Map<String, Number>>>(windowSize);
-    if (outputMap == null)
+    }
+    if (outputMap == null) {
       outputMap = new HashMap<String, Map<String, KeyValPair<MutableDouble, Integer>>>();
+    }
     setUpPatternList();
 
   }
@@ -238,8 +244,9 @@ public class MultiWindowDimensionAggregation implements Operator
       }
     }
     currentWindowMap.clear();
-    if (patternList == null || patternList.isEmpty())
+    if (patternList == null || patternList.isEmpty()) {
       setUpPatternList();
+    }
 
   }
 
@@ -255,12 +262,13 @@ public class MultiWindowDimensionAggregation implements Operator
           outputData.put(e.getKey(), new DimensionObject<String>(keyVal.getKey(), dimensionValObj.getKey()));
         } else if (operationType == AggregateOperation.AVERAGE) {
           if (keyVal.getValue() != 0) {
-            double totalCount = ((double) (totalWindowsOccupied * applicationWindowSize)) / 1000;
+            double totalCount = ((double)(totalWindowsOccupied * applicationWindowSize)) / 1000;
             outputData.put(e.getKey(), new DimensionObject<String>(new MutableDouble(keyVal.getKey().doubleValue() / totalCount), dimensionValObj.getKey()));
           }
         }
-        if (!outputData.isEmpty())
+        if (!outputData.isEmpty()) {
           output.emit(outputData);
+        }
       }
     }
     currentWindow = (currentWindow + 1) % windowSize;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/RegexMatchMapOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/RegexMatchMapOperator.java b/library/src/main/java/com/datatorrent/lib/logs/RegexMatchMapOperator.java
index d64c634..e469b0c 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/RegexMatchMapOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/RegexMatchMapOperator.java
@@ -20,16 +20,19 @@ package com.datatorrent.lib.logs;
 
 import java.util.HashMap;
 import java.util.Map;
-import com.google.code.regexp.Pattern;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.code.regexp.Matcher;
-import com.datatorrent.common.util.BaseOperator;
+import com.google.code.regexp.Pattern;
+
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator parses unstructured log data into named fields.
@@ -77,7 +80,7 @@ import org.slf4j.LoggerFactory;
  * @since 1.0.5
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class RegexMatchMapOperator extends BaseOperator
 {
   /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java b/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
index 7f903fc..5f09a4b 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.api.DefaultInputPort;
-
 import java.util.Collection;
 
+import com.datatorrent.api.DefaultInputPort;
+
 /**
  * Aggregates input tuples that are collections of longs and double and emits result on four ports.
  * <p>
@@ -49,54 +49,52 @@ import java.util.Collection;
  * @since 0.3.3
  */
 public abstract class AbstractAggregateCalc<T extends Number> extends
-		AbstractOutput
+    AbstractOutput
 {
-	/**
-	 * Input port, accepts collection of values of type 'T'.
-	 */
-	public final transient DefaultInputPort<Collection<T>> input = new DefaultInputPort<Collection<T>>()
-	{
-		/**
-		 * Aggregate calculation result is only emitted on output port if it is connected.
-		 */
-		@Override
-		public void process(Collection<T> collection)
-		{
-			Double dResult = null;
-			if (doubleResult.isConnected()) {
-				doubleResult.emit(dResult = aggregateDoubles(collection));
-			}
+  /**
+   * Input port, accepts collection of values of type 'T'.
+   */
+  public final transient DefaultInputPort<Collection<T>> input = new DefaultInputPort<Collection<T>>()
+  {
+    /**
+     * Aggregate calculation result is only emitted on output port if it is connected.
+     */
+    @Override
+    public void process(Collection<T> collection)
+    {
+      Double dResult = null;
+      if (doubleResult.isConnected()) {
+        doubleResult.emit(dResult = aggregateDoubles(collection));
+      }
 
-			if (floatResult.isConnected()) {
-				floatResult
-						.emit(dResult == null ? (float) (aggregateDoubles(collection))
-								: dResult.floatValue());
-			}
+      if (floatResult.isConnected()) {
+        floatResult.emit(dResult == null ? (float)(aggregateDoubles(collection)) : dResult.floatValue());
+      }
 
-			Long lResult = null;
-			if (longResult.isConnected()) {
-				longResult.emit(lResult = aggregateLongs(collection));
-			}
+      Long lResult = null;
+      if (longResult.isConnected()) {
+        longResult.emit(lResult = aggregateLongs(collection));
+      }
 
-			if (integerResult.isConnected()) {
-				integerResult.emit(lResult == null ? (int) aggregateLongs(collection)
-						: lResult.intValue());
-			}
-		}
+      if (integerResult.isConnected()) {
+        integerResult.emit(lResult == null ? (int)aggregateLongs(collection)
+            : lResult.intValue());
+      }
+    }
 
-	};
+  };
 
-	/**
-	 * Abstract function to be implemented by sub class, custom calculation on input aggregate.
-	 * @param collection Aggregate of values 
-	 * @return calculated value.
-	 */
-	public abstract long aggregateLongs(Collection<T> collection);
+  /**
+   * Abstract function to be implemented by sub class, custom calculation on input aggregate.
+   * @param collection Aggregate of values
+   * @return calculated value.
+   */
+  public abstract long aggregateLongs(Collection<T> collection);
 
-	/**
-	 * Abstract function to be implemented by sub class, custom calculation on input aggregate.
-	 * @param collection Aggregate of values 
-	 * @return calculated value.
-	 */
-	public abstract double aggregateDoubles(Collection<T> collection);
+  /**
+   * Abstract function to be implemented by sub class, custom calculation on input aggregate.
+   * @param collection Aggregate of values
+   * @return calculated value.
+   */
+  public abstract double aggregateDoubles(Collection<T> collection);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/AbstractOutput.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AbstractOutput.java b/library/src/main/java/com/datatorrent/lib/math/AbstractOutput.java
index bb387b4..9600021 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AbstractOutput.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AbstractOutput.java
@@ -18,9 +18,9 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Abstract base operator defining optional double/float/long/integer output port.
@@ -34,27 +34,27 @@ import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
  */
 public abstract class AbstractOutput extends BaseOperator
 {
-	/**
-	 * Double type output.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Double> doubleResult = new DefaultOutputPort<Double>();
+  /**
+   * Double type output.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Double> doubleResult = new DefaultOutputPort<Double>();
 
-	/**
-	 * Float type output.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Float> floatResult = new DefaultOutputPort<Float>();
+  /**
+   * Float type output.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Float> floatResult = new DefaultOutputPort<Float>();
 
-	/**
-	 * Long type output.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Long> longResult = new DefaultOutputPort<Long>();
+  /**
+   * Long type output.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Long> longResult = new DefaultOutputPort<Long>();
 
-	/**
-	 * Integer type output.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Integer> integerResult = new DefaultOutputPort<Integer>();
+  /**
+   * Integer type output.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Integer> integerResult = new DefaultOutputPort<Integer>();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/AbstractXmlCartesianProduct.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AbstractXmlCartesianProduct.java b/library/src/main/java/com/datatorrent/lib/math/AbstractXmlCartesianProduct.java
index a6b7ab2..10b6c15 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AbstractXmlCartesianProduct.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AbstractXmlCartesianProduct.java
@@ -18,17 +18,23 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.api.Context;
-import com.datatorrent.netlet.util.DTThrowable;
-import com.datatorrent.lib.xml.AbstractXmlDOMOperator;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.validation.constraints.NotNull;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+
 import org.w3c.dom.Document;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
-import javax.validation.constraints.NotNull;
-import javax.xml.xpath.*;
-import java.util.ArrayList;
-import java.util.List;
+import com.datatorrent.api.Context;
+import com.datatorrent.lib.xml.AbstractXmlDOMOperator;
+import com.datatorrent.netlet.util.DTThrowable;
 
 /**
  * An operator that performs a cartesian product between different elements in a xml document.
@@ -146,7 +152,7 @@ public abstract class AbstractXmlCartesianProduct<T> extends AbstractXmlDOMOpera
     try {
       List<String> result = new ArrayList<String>();
       for (CartesianProduct cartesianProduct : cartesianProducts) {
-          cartesianProduct.product(document, result);
+        cartesianProduct.product(document, result);
       }
       processResult(result, tuple);
     } catch (XPathExpressionException e) {
@@ -252,8 +258,11 @@ public abstract class AbstractXmlCartesianProduct<T> extends AbstractXmlDOMOpera
           int balance = 1;
           int i;
           for (i = 1; (i < spec.length()) && (balance > 0); ++i) {
-            if (spec.charAt(i) == ')') balance--;
-            else if (spec.charAt(i) == '(') balance++;
+            if (spec.charAt(i) == ')') {
+              balance--;
+            } else if (spec.charAt(i) == '(') {
+              balance++;
+            }
           }
           if (i == spec.length()) {
             estr = spec.substring(1, spec.length() - 1);
@@ -358,10 +367,10 @@ public abstract class AbstractXmlCartesianProduct<T> extends AbstractXmlDOMOpera
             int chldEdDelIdx = productSpec.length() - 1;
             int chldSepDelIdx;
             if ((productSpec.charAt(chldStDelIdx) == '(') && (productSpec.charAt(chldEdDelIdx) == ')')
-                    && ((chldSepDelIdx = productSpec.indexOf(':')) != -1)) {
+                && ((chldSepDelIdx = productSpec.indexOf(':')) != -1)) {
               String child1Spec = productSpec.substring(chldStDelIdx + 1, chldSepDelIdx);
               String child2Spec = productSpec.substring(chldSepDelIdx + 1, chldEdDelIdx);
-              parentElement = (SimplePathElement) pathElement;
+              parentElement = (SimplePathElement)pathElement;
               childElement1 = pathElementFactory.getSpecable(child1Spec);
               childElement2 = pathElementFactory.getSpecable(child2Spec);
             }
@@ -419,7 +428,7 @@ public abstract class AbstractXmlCartesianProduct<T> extends AbstractXmlDOMOpera
   private List<Node> getNodes(Document document, String path) throws XPathExpressionException
   {
     XPathExpression pathExpr = xpath.compile(path);
-    NodeList nodeList = (NodeList) pathExpr.evaluate(document, XPathConstants.NODESET);
+    NodeList nodeList = (NodeList)pathExpr.evaluate(document, XPathConstants.NODESET);
     List<Node> nodes = new ArrayList<Node>();
     for (int i = 0; i < nodeList.getLength(); ++i) {
       nodes.add(nodeList.item(i));
@@ -459,8 +468,11 @@ public abstract class AbstractXmlCartesianProduct<T> extends AbstractXmlDOMOpera
     String delim = getDelim();
     boolean first = true;
     for (Node node : nodes) {
-      if (!first) sb.append(delim);
-      else first = false;
+      if (!first) {
+        sb.append(delim);
+      } else {
+        first = false;
+      }
       sb.append(getValue(node));
     }
     return sb.toString();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java b/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
index 03c98d3..91cc9ba 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
@@ -39,7 +39,8 @@ public abstract class AbstractXmlKeyValueCartesianProduct<T> extends AbstractXml
   }
 
   @Override
-  public boolean isValueNode(Node n) {
+  public boolean isValueNode(Node n)
+  {
     return isTextContainerNode(n);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Average.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Average.java b/library/src/main/java/com/datatorrent/lib/math/Average.java
index d956e05..4dfdf1f 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Average.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Average.java
@@ -44,77 +44,77 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
  */
 public class Average<V extends Number> extends BaseNumberValueOperator<V>
 {
-	/**
-	 * Input port that takes a number.
-	 */
-	public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
-	{
-		/**
-		 * Computes sum and count with each tuple
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			sums += tuple.doubleValue();
-			counts++;
-		}
-	};
+  /**
+   * Input port that takes a number.
+   */
+  public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
+  {
+    /**
+     * Computes sum and count with each tuple
+     */
+    @Override
+    public void process(V tuple)
+    {
+      sums += tuple.doubleValue();
+      counts++;
+    }
+  };
 
-	/**
-	 * Output port that emits average as a number.
-	 */
-	public final transient DefaultOutputPort<V> average = new DefaultOutputPort<V>();
+  /**
+   * Output port that emits average as a number.
+   */
+  public final transient DefaultOutputPort<V> average = new DefaultOutputPort<V>();
 
-	protected double sums = 0;
-	protected long counts = 0;
+  protected double sums = 0;
+  protected long counts = 0;
 
-	/**
-	 * Emit average.
-	 */
-	@Override
-	public void endWindow()
-	{
-		// May want to send out only if count != 0
-		if (counts != 0) {
-			average.emit(getAverage());
-		}
-		sums = 0;
-		counts = 0;
-	}
+  /**
+   * Emit average.
+   */
+  @Override
+  public void endWindow()
+  {
+    // May want to send out only if count != 0
+    if (counts != 0) {
+      average.emit(getAverage());
+    }
+    sums = 0;
+    counts = 0;
+  }
 
-	/**
-	 * Calculate average based on number type.
-	 */
-	@SuppressWarnings("unchecked")
-	public V getAverage()
-	{
-		if (counts == 0) {
-			return null;
-		}
-		V num = getValue(sums);
-		Number val;
-		switch (getType()) {
-			case DOUBLE:
-				val = new Double(num.doubleValue() / counts);
-				break;
-			case INTEGER:
-				int icount = (int) (num.intValue() / counts);
-				val = new Integer(icount);
-				break;
-			case FLOAT:
-				val = new Float(num.floatValue() / counts);
-				break;
-			case LONG:
-				val = new Long(num.longValue() / counts);
-				break;
-			case SHORT:
-				short scount = (short) (num.shortValue() / counts);
-				val = new Short(scount);
-				break;
-			default:
-				val = new Double(num.doubleValue() / counts);
-				break;
-		}
-		return (V) val;
-	}
+  /**
+   * Calculate average based on number type.
+   */
+  @SuppressWarnings("unchecked")
+  public V getAverage()
+  {
+    if (counts == 0) {
+      return null;
+    }
+    V num = getValue(sums);
+    Number val;
+    switch (getType()) {
+      case DOUBLE:
+        val = new Double(num.doubleValue() / counts);
+        break;
+      case INTEGER:
+        int icount = (int)(num.intValue() / counts);
+        val = new Integer(icount);
+        break;
+      case FLOAT:
+        val = new Float(num.floatValue() / counts);
+        break;
+      case LONG:
+        val = new Long(num.longValue() / counts);
+        break;
+      case SHORT:
+        short scount = (short)(num.shortValue() / counts);
+        val = new Short(scount);
+        break;
+      default:
+        val = new Double(num.doubleValue() / counts);
+        break;
+    }
+    return (V)val;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/AverageKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AverageKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/AverageKeyVal.java
index e9e7e40..e443780 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AverageKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AverageKeyVal.java
@@ -18,15 +18,17 @@
  */
 package com.datatorrent.lib.math;
 
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang.mutable.MutableDouble;
+import org.apache.commons.lang.mutable.MutableLong;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
 import com.datatorrent.lib.util.KeyValPair;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.commons.lang.mutable.MutableDouble;
-import org.apache.commons.lang.mutable.MutableLong;
 
 /**
  *
@@ -57,84 +59,87 @@ import org.apache.commons.lang.mutable.MutableLong;
  */
 public class AverageKeyVal<K> extends BaseNumberKeyValueOperator<K, Number>
 {
-	// Aggregate sum of all values seen for a key.
-	protected HashMap<K, MutableDouble> sums = new HashMap<K, MutableDouble>();
-	
-	// Count of number of values seen for key.
-	protected HashMap<K, MutableLong> counts = new HashMap<K, MutableLong>();
-	
-	/**
-	 * Input port that takes a key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, ? extends Number>> data = new DefaultInputPort<KeyValPair<K, ? extends Number>>()
-	{
-		/**
-		 * Adds the values for each key, counts the number of occurrences of each
-		 * key and computes the average.
-		 */
-		@Override
-		public void process(KeyValPair<K, ? extends Number> tuple)
-		{
-			K key = tuple.getKey();
-			if (!doprocessKey(key)) {
-				return;
-			}
-			MutableDouble val = sums.get(key);
-			if (val == null) {
-				val = new MutableDouble(tuple.getValue().doubleValue());
-			} else {
-				val.add(tuple.getValue().doubleValue());
-			}
-			sums.put(cloneKey(key), val);
+  // Aggregate sum of all values seen for a key.
+  protected HashMap<K, MutableDouble> sums = new HashMap<K, MutableDouble>();
+
+  // Count of number of values seen for key.
+  protected HashMap<K, MutableLong> counts = new HashMap<K, MutableLong>();
+
+  /**
+   * Input port that takes a key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, ? extends Number>> data = new DefaultInputPort<KeyValPair<K, ? extends Number>>()
+  {
+    /**
+     * Adds the values for each key, counts the number of occurrences of each
+     * key and computes the average.
+     */
+    @Override
+    public void process(KeyValPair<K, ? extends Number> tuple)
+    {
+      K key = tuple.getKey();
+      if (!doprocessKey(key)) {
+        return;
+      }
+      MutableDouble val = sums.get(key);
+      if (val == null) {
+        val = new MutableDouble(tuple.getValue().doubleValue());
+      } else {
+        val.add(tuple.getValue().doubleValue());
+      }
+      sums.put(cloneKey(key), val);
+
+      MutableLong count = counts.get(key);
+      if (count == null) {
+        count = new MutableLong(0);
+        counts.put(cloneKey(key), count);
+      }
+      count.increment();
+    }
+  };
+
+  /**
+   * Double average output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Double>> doubleAverage =
+      new DefaultOutputPort<KeyValPair<K, Double>>();
 
-			MutableLong count = counts.get(key);
-			if (count == null) {
-				count = new MutableLong(0);
-				counts.put(cloneKey(key), count);
-			}
-			count.increment();
-		}
-	};
+  /**
+   * Integer average output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Integer>> intAverage =
+      new DefaultOutputPort<KeyValPair<K, Integer>>();
 
-	/**
-	 * Double average output port. 
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Double>> doubleAverage = new DefaultOutputPort<KeyValPair<K, Double>>();
-	
-	/**
-	 * Integer average output port. 
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Integer>> intAverage = new DefaultOutputPort<KeyValPair<K, Integer>>();
-	
-	/**
-	 * Long average output port. 
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Long>> longAverage = new DefaultOutputPort<KeyValPair<K, Long>>();
+  /**
+   * Long average output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Long>> longAverage =
+      new DefaultOutputPort<KeyValPair<K, Long>>();
 
-	/**
-	 * Emits average for each key in end window. Data is computed during process
-	 * on input port Clears the internal data before return.
-	 */
-	@Override
-	public void endWindow()
-	{
-		for (Map.Entry<K, MutableDouble> e : sums.entrySet()) {
-			K key = e.getKey();
-			double d = e.getValue().doubleValue();
-			if (doubleAverage.isConnected()) {
-				doubleAverage.emit(new KeyValPair<K, Double>(key, d / counts.get(key).doubleValue()));
-			}
-			if (intAverage.isConnected()) {
-				intAverage.emit(new KeyValPair<K, Integer>(key, (int) d));
-			}
-			if (longAverage.isConnected()) {
-				longAverage.emit(new KeyValPair<K, Long>(key, (long) d));
-			}
-		}
-		sums.clear();
-		counts.clear();
-	}
+  /**
+   * Emits average for each key in end window. Data is computed during process
+   * on input port Clears the internal data before return.
+   */
+  @Override
+  public void endWindow()
+  {
+    for (Map.Entry<K, MutableDouble> e : sums.entrySet()) {
+      K key = e.getKey();
+      double d = e.getValue().doubleValue();
+      if (doubleAverage.isConnected()) {
+        doubleAverage.emit(new KeyValPair<K, Double>(key, d / counts.get(key).doubleValue()));
+      }
+      if (intAverage.isConnected()) {
+        intAverage.emit(new KeyValPair<K, Integer>(key, (int)d));
+      }
+      if (longAverage.isConnected()) {
+        longAverage.emit(new KeyValPair<K, Long>(key, (long)d));
+      }
+    }
+    sums.clear();
+    counts.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Change.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Change.java b/library/src/main/java/com/datatorrent/lib/math/Change.java
index 628839f..57bad6b 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Change.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Change.java
@@ -61,57 +61,57 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
 public class Change<V extends Number> extends BaseNumberValueOperator<V>
 {
         /**
-	 * Input data port that takes a number.
-	 */
-	public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
-	{
-		/**
-		 * Process each key, compute change or percent, and emit it.
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			if (baseValue != 0) { // Avoid divide by zero, Emit an error tuple?
-				double cval = tuple.doubleValue() - baseValue;
-				change.emit(getValue(cval));
-				percent.emit((cval / baseValue) * 100);
-			}
-		}
-	};
+   * Input data port that takes a number.
+   */
+  public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
+  {
+    /**
+     * Process each key, compute change or percent, and emit it.
+     */
+    @Override
+    public void process(V tuple)
+    {
+      if (baseValue != 0) { // Avoid divide by zero, Emit an error tuple?
+        double cval = tuple.doubleValue() - baseValue;
+        change.emit(getValue(cval));
+        percent.emit((cval / baseValue) * 100);
+      }
+    }
+  };
         
         /**
-	 * Input port that takes a number&nbsp; It stores the value for base comparison. 
-	 */
-	public final transient DefaultInputPort<V> base = new DefaultInputPort<V>()
-	{
-		/**
-		 * Process each key to store the value. If same key appears again update
-		 * with latest value.
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			if (tuple.doubleValue() != 0.0) { // Avoid divide by zero, Emit an error
-																				// tuple?
-				baseValue = tuple.doubleValue();
-			}
-		}
-	};
-	
-	/**
-	 * Output port that emits change in value compared to base value.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<V> change = new DefaultOutputPort<V>();
-	
-	/**
-	 * Output port that emits percent change in data value compared to base value.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Double> percent = new DefaultOutputPort<Double>();
-	
-	/**
-	 * baseValue is a state full field. It is retained across windows.
-	 */
-	private double baseValue = 0;
+   * Input port that takes a number&nbsp; It stores the value for base comparison.
+   */
+  public final transient DefaultInputPort<V> base = new DefaultInputPort<V>()
+  {
+    /**
+     * Process each key to store the value. If same key appears again update
+     * with latest value.
+     */
+    @Override
+    public void process(V tuple)
+    {
+      if (tuple.doubleValue() != 0.0) { // Avoid divide by zero, Emit an error
+                                        // tuple?
+        baseValue = tuple.doubleValue();
+      }
+    }
+  };
+
+  /**
+   * Output port that emits change in value compared to base value.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<V> change = new DefaultOutputPort<V>();
+
+  /**
+   * Output port that emits percent change in data value compared to base value.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Double> percent = new DefaultOutputPort<Double>();
+
+  /**
+   * baseValue is a state full field. It is retained across windows.
+   */
+  private double baseValue = 0;
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/ChangeAlert.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/ChangeAlert.java b/library/src/main/java/com/datatorrent/lib/math/ChangeAlert.java
index 01a040d..3c48016 100644
--- a/library/src/main/java/com/datatorrent/lib/math/ChangeAlert.java
+++ b/library/src/main/java/com/datatorrent/lib/math/ChangeAlert.java
@@ -53,66 +53,66 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class ChangeAlert<V extends Number> extends BaseNumberValueOperator<V>
 {
-	/**
-	 * Input port that takes in a number.
-	 */
-	public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
-	{
-		/**
-		 * Process each key, compute change or percent, and emit it. If we get 0 as
-		 * tuple next will be skipped.
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			double tval = tuple.doubleValue();
-			if (baseValue == 0) { // Avoid divide by zero, Emit an error tuple?
-				baseValue = tval;
-				return;
-			}
-			double change = tval - baseValue;
-			double percent = (change / baseValue) * 100;
-			if (percent < 0.0) {
-				percent = 0.0 - percent;
-			}
-			if (percent > percentThreshold) {
-				KeyValPair<V, Double> kv = new KeyValPair<V, Double>(cloneKey(tuple),
-						percent);
-				alert.emit(kv);
-			}
-			baseValue = tval;
-		}
-	};
+  /**
+   * Input port that takes in a number.
+   */
+  public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
+  {
+    /**
+     * Process each key, compute change or percent, and emit it. If we get 0 as
+     * tuple next will be skipped.
+     */
+    @Override
+    public void process(V tuple)
+    {
+      double tval = tuple.doubleValue();
+      if (baseValue == 0) { // Avoid divide by zero, Emit an error tuple?
+        baseValue = tval;
+        return;
+      }
+      double change = tval - baseValue;
+      double percent = (change / baseValue) * 100;
+      if (percent < 0.0) {
+        percent = 0.0 - percent;
+      }
+      if (percent > percentThreshold) {
+        KeyValPair<V, Double> kv = new KeyValPair<V, Double>(cloneKey(tuple),
+            percent);
+        alert.emit(kv);
+      }
+      baseValue = tval;
+    }
+  };
 
 
-	/**
-	 * Output port which emits a key value pair.
-	 */
-	public final transient DefaultOutputPort<KeyValPair<V, Double>> alert = new DefaultOutputPort<KeyValPair<V, Double>>();
+  /**
+   * Output port which emits a key value pair.
+   */
+  public final transient DefaultOutputPort<KeyValPair<V, Double>> alert = new DefaultOutputPort<KeyValPair<V, Double>>();
 
-	/**
-	 * baseValue is a state full field. It is retained across windows
-	 */
-	private double baseValue = 0;
-	@Min(1)
-	private double percentThreshold = 0.0;
+  /**
+   * baseValue is a state full field. It is retained across windows
+   */
+  private double baseValue = 0;
+  @Min(1)
+  private double percentThreshold = 0.0;
 
-	/**
-	 * getter function for threshold value
-	 *
-	 * @return threshold value
-	 */
-	@Min(1)
-	public double getPercentThreshold()
-	{
-		return percentThreshold;
-	}
+  /**
+   * getter function for threshold value
+   *
+   * @return threshold value
+   */
+  @Min(1)
+  public double getPercentThreshold()
+  {
+    return percentThreshold;
+  }
 
-	/**
-	 * setter function for threshold value
-	 */
-	public void setPercentThreshold(double d)
-	{
-		percentThreshold = d;
-	}
+  /**
+   * setter function for threshold value
+   */
+  public void setPercentThreshold(double d)
+  {
+    percentThreshold = d;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/ChangeAlertKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/ChangeAlertKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/ChangeAlertKeyVal.java
index 43e098f..b0d2e77 100644
--- a/library/src/main/java/com/datatorrent/lib/math/ChangeAlertKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/ChangeAlertKeyVal.java
@@ -52,78 +52,78 @@ import com.datatorrent.lib.util.KeyValPair;
  * @since 0.3.3
  */
 public class ChangeAlertKeyVal<K, V extends Number> extends
-		BaseNumberKeyValueOperator<K, V>
+    BaseNumberKeyValueOperator<K, V>
 {
-	/**
-	 * Base map is a StateFull field. It is retained across windows
-	 */
-	private HashMap<K, MutableDouble> basemap = new HashMap<K, MutableDouble>();
+  /**
+   * Base map is a StateFull field. It is retained across windows
+   */
+  private HashMap<K, MutableDouble> basemap = new HashMap<K, MutableDouble>();
 
-	/**
-	 * Input data port that takes a key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		/**
-		 * Process each key, compute change or percent, and emit it.
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			K key = tuple.getKey();
-			double tval = tuple.getValue().doubleValue();
-			MutableDouble val = basemap.get(key);
-			if (!doprocessKey(key)) {
-				return;
-			}
-			if (val == null) { // Only process keys that are in the basemap
-				val = new MutableDouble(tval);
-				basemap.put(cloneKey(key), val);
-				return;
-			}
-			double change = tval - val.doubleValue();
-			double percent = (change / val.doubleValue()) * 100;
-			if (percent < 0.0) {
-				percent = 0.0 - percent;
-			}
-			if (percent > percentThreshold) {
-				KeyValPair<V, Double> dmap = new KeyValPair<V, Double>(
-						cloneValue(tuple.getValue()), percent);
-				KeyValPair<K, KeyValPair<V, Double>> otuple = new KeyValPair<K, KeyValPair<V, Double>>(
-						cloneKey(key), dmap);
-				alert.emit(otuple);
-			}
-			val.setValue(tval);
-		}
-	};
+  /**
+   * Input data port that takes a key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    /**
+     * Process each key, compute change or percent, and emit it.
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      K key = tuple.getKey();
+      double tval = tuple.getValue().doubleValue();
+      MutableDouble val = basemap.get(key);
+      if (!doprocessKey(key)) {
+        return;
+      }
+      if (val == null) { // Only process keys that are in the basemap
+        val = new MutableDouble(tval);
+        basemap.put(cloneKey(key), val);
+        return;
+      }
+      double change = tval - val.doubleValue();
+      double percent = (change / val.doubleValue()) * 100;
+      if (percent < 0.0) {
+        percent = 0.0 - percent;
+      }
+      if (percent > percentThreshold) {
+        KeyValPair<V, Double> dmap = new KeyValPair<V, Double>(
+            cloneValue(tuple.getValue()), percent);
+        KeyValPair<K, KeyValPair<V, Double>> otuple = new KeyValPair<K, KeyValPair<V, Double>>(
+            cloneKey(key), dmap);
+        alert.emit(otuple);
+      }
+      val.setValue(tval);
+    }
+  };
 
-	/**
-	 * Key,Percent Change output port.
-	 */
-	public final transient DefaultOutputPort<KeyValPair<K, KeyValPair<V, Double>>> alert = new DefaultOutputPort<KeyValPair<K, KeyValPair<V, Double>>>();
+  /**
+   * Key,Percent Change output port.
+   */
+  public final transient DefaultOutputPort<KeyValPair<K, KeyValPair<V, Double>>> alert = new DefaultOutputPort<KeyValPair<K, KeyValPair<V, Double>>>();
 
-	/**
-	 * Alert thresh hold percentage set by application.
-	 */
-	@Min(1)
-	private double percentThreshold = 0.0;
+  /**
+   * Alert thresh hold percentage set by application.
+   */
+  @Min(1)
+  private double percentThreshold = 0.0;
 
-	/**
-	 * getter function for threshold value
-	 *
-	 * @return threshold value
-	 */
-	@Min(1)
-	public double getPercentThreshold()
-	{
-		return percentThreshold;
-	}
+  /**
+   * getter function for threshold value
+   *
+   * @return threshold value
+   */
+  @Min(1)
+  public double getPercentThreshold()
+  {
+    return percentThreshold;
+  }
 
-	/**
-	 * setter function for threshold value
-	 */
-	public void setPercentThreshold(double d)
-	{
-		percentThreshold = d;
-	}
+  /**
+   * setter function for threshold value
+   */
+  public void setPercentThreshold(double d)
+  {
+    percentThreshold = d;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/ChangeAlertMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/ChangeAlertMap.java b/library/src/main/java/com/datatorrent/lib/math/ChangeAlertMap.java
index ebf16d1..e212a2d 100644
--- a/library/src/main/java/com/datatorrent/lib/math/ChangeAlertMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/ChangeAlertMap.java
@@ -74,7 +74,7 @@ public class ChangeAlertMap<K, V extends Number> extends BaseNumberKeyValueOpera
           continue;
         }
         double change = e.getValue().doubleValue() - val.doubleValue();
-        double percent = (change/val.doubleValue())*100;
+        double percent = (change / val.doubleValue()) * 100;
         if (percent < 0.0) {
           percent = 0.0 - percent;
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/ChangeKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/ChangeKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/ChangeKeyVal.java
index 2e406ec..3f77052 100644
--- a/library/src/main/java/com/datatorrent/lib/math/ChangeKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/ChangeKeyVal.java
@@ -54,8 +54,7 @@ import com.datatorrent.lib.util.KeyValPair;
  * @tags change, key value
  * @since 0.3.3
  */
-public class ChangeKeyVal<K, V extends Number> extends
-  BaseNumberKeyValueOperator<K, V>
+public class ChangeKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K, V>
 {
   /**
    * basemap is a stateful field. It is retained across windows
@@ -81,8 +80,7 @@ public class ChangeKeyVal<K, V extends Number> extends
       if (bval != null) { // Only process keys that are in the basemap
         double cval = tuple.getValue().doubleValue() - bval.doubleValue();
         change.emit(new KeyValPair<K, V>(cloneKey(key), getValue(cval)));
-        percent.emit(new KeyValPair<K, Double>(cloneKey(key), (cval / bval
-          .doubleValue()) * 100));
+        percent.emit(new KeyValPair<K, Double>(cloneKey(key), (cval / bval.doubleValue()) * 100));
       }
     }
   };

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/CompareExceptMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/CompareExceptMap.java b/library/src/main/java/com/datatorrent/lib/math/CompareExceptMap.java
index 0573e3e..66bd7da 100644
--- a/library/src/main/java/com/datatorrent/lib/math/CompareExceptMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/CompareExceptMap.java
@@ -18,13 +18,14 @@
  */
 package com.datatorrent.lib.math;
 
+import java.util.HashMap;
+import java.util.Map;
+
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.algo.MatchMap;
 import com.datatorrent.lib.util.UnifierHashMap;
-import java.util.HashMap;
-import java.util.Map;
 
 /**
  * Operator compares based on the property "key", "value", and "compare".
@@ -86,13 +87,13 @@ public class CompareExceptMap<K, V extends Number> extends MatchMap<K, V>
   /**
    * Output port that emits a hashmap of matched tuples after comparison.
    */
-  @OutputPortFieldAnnotation(optional=true)
+  @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<HashMap<K, V>> compare = match;
   
   /**
    * Output port that emits a hashmap of non matching tuples after comparison.
    */
-  @OutputPortFieldAnnotation(optional=true)
+  @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<HashMap<K, V>> except = new DefaultOutputPort<HashMap<K, V>>()
   {
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/CompareMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/CompareMap.java b/library/src/main/java/com/datatorrent/lib/math/CompareMap.java
index 540f756..3636207 100644
--- a/library/src/main/java/com/datatorrent/lib/math/CompareMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/CompareMap.java
@@ -18,10 +18,11 @@
  */
 package com.datatorrent.lib.math;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.algo.MatchMap;
-import java.util.HashMap;
 
 /**
  * This operator compares tuples subclassed from Number based on the property "key", "value", and "cmp", and matching tuples are emitted.
@@ -78,8 +79,8 @@ import java.util.HashMap;
 @Stateless
 public class CompareMap<K, V extends Number> extends MatchMap<K,V>
 {
-    /**
-     * Output port that emits a hashmap of matching number tuples after comparison.
-     */
-    public final transient DefaultOutputPort<HashMap<K, V>> compare = match;
+  /**
+   * Output port that emits a hashmap of matching number tuples after comparison.
+   */
+  public final transient DefaultOutputPort<HashMap<K, V>> compare = match;
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/CountKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/CountKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/CountKeyVal.java
index 64c5029..d593020 100644
--- a/library/src/main/java/com/datatorrent/lib/math/CountKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/CountKeyVal.java
@@ -50,65 +50,65 @@ import com.datatorrent.lib.util.UnifierCountOccurKey;
 public class CountKeyVal<K, V> extends BaseKeyValueOperator<K, V>
 {
 
-	/**
-	 * Key occurrence count map.
-	 */
-	protected HashMap<K, MutableInt> counts = new HashMap<K, MutableInt>();
+  /**
+   * Key occurrence count map.
+   */
+  protected HashMap<K, MutableInt> counts = new HashMap<K, MutableInt>();
 
-	/**
-	 * Input data port that takes key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		/**
-		 * For each tuple (a key value pair): Adds the values for each key, Counts
-		 * the number of occurrence of each key
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			K key = tuple.getKey();
-			MutableInt count = counts.get(key);
-			if (count == null) {
-				count = new MutableInt(0);
-				counts.put(cloneKey(key), count);
-			}
-			count.increment();
-		}
+  /**
+   * Input data port that takes key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    /**
+     * For each tuple (a key value pair): Adds the values for each key, Counts
+     * the number of occurrence of each key
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      K key = tuple.getKey();
+      MutableInt count = counts.get(key);
+      if (count == null) {
+        count = new MutableInt(0);
+        counts.put(cloneKey(key), count);
+      }
+      count.increment();
+    }
 
-		@Override
-		public StreamCodec<KeyValPair<K, V>> getStreamCodec()
-		{
-			return getKeyValPairStreamCodec();
-		}
-	};
+    @Override
+    public StreamCodec<KeyValPair<K, V>> getStreamCodec()
+    {
+      return getKeyValPairStreamCodec();
+    }
+  };
 
-	/**
-	 * Key, occurrence value pair output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Integer>> count = new DefaultOutputPort<KeyValPair<K, Integer>>()
-	{
-		@Override
-		public UnifierCountOccurKey<K> getUnifier()
-		{
-			return new UnifierCountOccurKey<K>();
-		}
-	};
+  /**
+   * Key, occurrence value pair output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Integer>> count = new DefaultOutputPort<KeyValPair<K, Integer>>()
+  {
+    @Override
+    public UnifierCountOccurKey<K> getUnifier()
+    {
+      return new UnifierCountOccurKey<K>();
+    }
+  };
 
-	/**
-	 * Emits on all ports that are connected. Data is computed during process on
-	 * input port and endWindow just emits it for each key. Clears the internal
-	 * data if resetAtEndWindow is true.
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	@Override
-	public void endWindow()
-	{
-		for (Map.Entry<K, MutableInt> e : counts.entrySet()) {
-			count.emit(new KeyValPair(e.getKey(),
-					new Integer(e.getValue().intValue())));
-		}
-		counts.clear();
-	}
+  /**
+   * Emits on all ports that are connected. Data is computed during process on
+   * input port and endWindow just emits it for each key. Clears the internal
+   * data if resetAtEndWindow is true.
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Override
+  public void endWindow()
+  {
+    for (Map.Entry<K, MutableInt> e : counts.entrySet()) {
+      count.emit(new KeyValPair(e.getKey(),
+          new Integer(e.getValue().intValue())));
+    }
+    counts.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Division.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Division.java b/library/src/main/java/com/datatorrent/lib/math/Division.java
index 5bbb9a9..d05af18 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Division.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Division.java
@@ -20,10 +20,10 @@ package com.datatorrent.lib.math;
 
 import java.util.ArrayList;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator does division metric on consecutive tuples on ports.
@@ -54,9 +54,9 @@ import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
  */
 public class Division extends BaseOperator
 {
-	/**
-	 * Array to store numerator inputs during window.
-	 */
+  /**
+   * Array to store numerator inputs during window.
+   */
   private ArrayList<Number> numer = new ArrayList<Number>();
   
   /**
@@ -83,7 +83,7 @@ public class Division extends BaseOperator
         if (loc > numer.size()) {
           loc = numer.size();
         }
-        emit(numer.get(loc-1), denom.get(loc-1));
+        emit(numer.get(loc - 1), denom.get(loc - 1));
         index++;
       }
     }
@@ -107,7 +107,7 @@ public class Division extends BaseOperator
         if (loc > numer.size()) {
           loc = numer.size();
         }
-        emit(numer.get(loc-1), denom.get(loc-1));
+        emit(numer.get(loc - 1), denom.get(loc - 1));
         index++;
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/ExceptMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/ExceptMap.java b/library/src/main/java/com/datatorrent/lib/math/ExceptMap.java
index 84b10b8..ddef880 100644
--- a/library/src/main/java/com/datatorrent/lib/math/ExceptMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/ExceptMap.java
@@ -68,35 +68,35 @@ public class ExceptMap<K, V extends Number> extends MatchMap<K, V>
         /**
          * Output port that emits non matching number tuples.
          */
-	public final transient DefaultOutputPort<HashMap<K, V>> except = new DefaultOutputPort<HashMap<K, V>>()
-	{
-		@Override
-		public Unifier<HashMap<K, V>> getUnifier()
-		{
-			return new UnifierHashMap<K, V>();
-		}
-	};
+  public final transient DefaultOutputPort<HashMap<K, V>> except = new DefaultOutputPort<HashMap<K, V>>()
+  {
+    @Override
+    public Unifier<HashMap<K, V>> getUnifier()
+    {
+      return new UnifierHashMap<K, V>();
+    }
+  };
 
-	/**
-	 * Does nothing. Overrides base as call super.tupleMatched() would emit the
-	 * tuple
-	 * 
-	 * @param tuple
-	 */
-	@Override
-	public void tupleMatched(Map<K, V> tuple)
-	{
-	}
+  /**
+   * Does nothing. Overrides base as call super.tupleMatched() would emit the
+   * tuple
+   *
+   * @param tuple
+   */
+  @Override
+  public void tupleMatched(Map<K, V> tuple)
+  {
+  }
 
-	/**
-	 * Emits the tuple. Calls cloneTuple to get a copy, allowing users to override
-	 * in case objects are mutable
-	 * 
-	 * @param tuple
-	 */
-	@Override
-	public void tupleNotMatched(Map<K, V> tuple)
-	{
-		except.emit(cloneTuple(tuple));
-	}
+  /**
+   * Emits the tuple. Calls cloneTuple to get a copy, allowing users to override
+   * in case objects are mutable
+   *
+   * @param tuple
+   */
+  @Override
+  public void tupleNotMatched(Map<K, V> tuple)
+  {
+    except.emit(cloneTuple(tuple));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/LogicalCompare.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/LogicalCompare.java b/library/src/main/java/com/datatorrent/lib/math/LogicalCompare.java
index 0b7e036..41ce5a0 100644
--- a/library/src/main/java/com/datatorrent/lib/math/LogicalCompare.java
+++ b/library/src/main/java/com/datatorrent/lib/math/LogicalCompare.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.common.util.Pair;
 
 /**
@@ -29,7 +29,7 @@ import com.datatorrent.common.util.Pair;
  * <p>
  * If the first value is equal to second value, then the pair is emitted on equalTo, greaterThanEqualTo, and lessThanEqualTo ports.
  * If the first value is less than second value, then the pair is emitted on notEqualTo, lessThan and lessThanEqualTo ports.
- * If the first value is greater than second value, then the pair is emitted on notEqualTo, greaterThan and greaterThanEqualTo ports. 
+ * If the first value is greater than second value, then the pair is emitted on notEqualTo, greaterThan and greaterThanEqualTo ports.
  * This is a pass through operator.
  * <br>
  * StateFull : No, output is computed during current window. <br>
@@ -51,61 +51,61 @@ import com.datatorrent.common.util.Pair;
  */
 @Stateless
 public abstract class LogicalCompare<T extends Comparable<? super T>> extends
-		BaseOperator
+    BaseOperator
 {
-	/**
-	 * Input port that takes a key, value pair for comparison.
-	 */
-	public final transient DefaultInputPort<Pair<T, T>> input = new DefaultInputPort<Pair<T, T>>()
-	{
-		@Override
-		public void process(Pair<T, T> tuple)
-		{
-			int i = tuple.first.compareTo(tuple.second);
-			if (i > 0) {
-				greaterThan.emit(tuple);
-				greaterThanOrEqualTo.emit(tuple);
-				notEqualTo.emit(tuple);
-			} else if (i < 0) {
-				lessThan.emit(tuple);
-				lessThanOrEqualTo.emit(tuple);
-				notEqualTo.emit(tuple);
-			} else {
-				equalTo.emit(tuple);
-				lessThanOrEqualTo.emit(tuple);
-				greaterThanOrEqualTo.emit(tuple);
-			}
-		}
+  /**
+   * Input port that takes a key, value pair for comparison.
+   */
+  public final transient DefaultInputPort<Pair<T, T>> input = new DefaultInputPort<Pair<T, T>>()
+  {
+    @Override
+    public void process(Pair<T, T> tuple)
+    {
+      int i = tuple.first.compareTo(tuple.second);
+      if (i > 0) {
+        greaterThan.emit(tuple);
+        greaterThanOrEqualTo.emit(tuple);
+        notEqualTo.emit(tuple);
+      } else if (i < 0) {
+        lessThan.emit(tuple);
+        lessThanOrEqualTo.emit(tuple);
+        notEqualTo.emit(tuple);
+      } else {
+        equalTo.emit(tuple);
+        lessThanOrEqualTo.emit(tuple);
+        greaterThanOrEqualTo.emit(tuple);
+      }
+    }
 
-	};
+  };
 
-	/**
-	 * Equal output port.
-	 */
-	public final transient DefaultOutputPort<Pair<T, T>> equalTo = new DefaultOutputPort<Pair<T, T>>();
+  /**
+   * Equal output port.
+   */
+  public final transient DefaultOutputPort<Pair<T, T>> equalTo = new DefaultOutputPort<Pair<T, T>>();
 
-	/**
-	 * Not Equal output port.
-	 */
-	public final transient DefaultOutputPort<Pair<T, T>> notEqualTo = new DefaultOutputPort<Pair<T, T>>();
+  /**
+   * Not Equal output port.
+   */
+  public final transient DefaultOutputPort<Pair<T, T>> notEqualTo = new DefaultOutputPort<Pair<T, T>>();
 
-	/**
-	 * Less than output port.
-	 */
-	public final transient DefaultOutputPort<Pair<T, T>> lessThan = new DefaultOutputPort<Pair<T, T>>();
+  /**
+   * Less than output port.
+   */
+  public final transient DefaultOutputPort<Pair<T, T>> lessThan = new DefaultOutputPort<Pair<T, T>>();
 
-	/**
-	 * Greater than output port.
-	 */
-	public final transient DefaultOutputPort<Pair<T, T>> greaterThan = new DefaultOutputPort<Pair<T, T>>();
+  /**
+   * Greater than output port.
+   */
+  public final transient DefaultOutputPort<Pair<T, T>> greaterThan = new DefaultOutputPort<Pair<T, T>>();
 
-	/**
-	 * Less than equal to output port.
-	 */
-	public final transient DefaultOutputPort<Pair<T, T>> lessThanOrEqualTo = new DefaultOutputPort<Pair<T, T>>();
+  /**
+   * Less than equal to output port.
+   */
+  public final transient DefaultOutputPort<Pair<T, T>> lessThanOrEqualTo = new DefaultOutputPort<Pair<T, T>>();
 
-	/**
-	 * Greater than equal to output port.
-	 */
-	public final transient DefaultOutputPort<Pair<T, T>> greaterThanOrEqualTo = new DefaultOutputPort<Pair<T, T>>();
+  /**
+   * Greater than equal to output port.
+   */
+  public final transient DefaultOutputPort<Pair<T, T>> greaterThanOrEqualTo = new DefaultOutputPort<Pair<T, T>>();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/LogicalCompareToConstant.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/LogicalCompareToConstant.java b/library/src/main/java/com/datatorrent/lib/math/LogicalCompareToConstant.java
index 5e98eae..659a287 100644
--- a/library/src/main/java/com/datatorrent/lib/math/LogicalCompareToConstant.java
+++ b/library/src/main/java/com/datatorrent/lib/math/LogicalCompareToConstant.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator does a logical comparison of a constant with a tuple.
@@ -54,78 +54,78 @@ import com.datatorrent.api.annotation.Stateless;
  */
 @Stateless
 public class LogicalCompareToConstant<T extends Comparable<? super T>> extends
-		BaseOperator
+    BaseOperator
 {
 
-	/**
-	 * Compare constant, set by application.
-	 */
-	private T constant;
+  /**
+   * Compare constant, set by application.
+   */
+  private T constant;
 
-	/**
-	 * Input port that takes a comparable to compare it with a constant.
-	 */
-	public final transient DefaultInputPort<T> input = new DefaultInputPort<T>()
-	{
-		@Override
-		public void process(T tuple)
-		{
-			int i = constant.compareTo(tuple);
-			if (i > 0) {
-				greaterThan.emit(tuple);
-				greaterThanOrEqualTo.emit(tuple);
-				notEqualTo.emit(tuple);
-			} else if (i < 0) {
-				lessThan.emit(tuple);
-				lessThanOrEqualTo.emit(tuple);
-				notEqualTo.emit(tuple);
-			} else {
-				equalTo.emit(tuple);
-				lessThanOrEqualTo.emit(tuple);
-				greaterThanOrEqualTo.emit(tuple);
-			}
-		}
+  /**
+   * Input port that takes a comparable to compare it with a constant.
+   */
+  public final transient DefaultInputPort<T> input = new DefaultInputPort<T>()
+  {
+    @Override
+    public void process(T tuple)
+    {
+      int i = constant.compareTo(tuple);
+      if (i > 0) {
+        greaterThan.emit(tuple);
+        greaterThanOrEqualTo.emit(tuple);
+        notEqualTo.emit(tuple);
+      } else if (i < 0) {
+        lessThan.emit(tuple);
+        lessThanOrEqualTo.emit(tuple);
+        notEqualTo.emit(tuple);
+      } else {
+        equalTo.emit(tuple);
+        lessThanOrEqualTo.emit(tuple);
+        greaterThanOrEqualTo.emit(tuple);
+      }
+    }
 
-	};
+  };
 
-	/**
-	 * Equal output port.
-	 */
-	public final transient DefaultOutputPort<T> equalTo = new DefaultOutputPort<T>();
+  /**
+   * Equal output port.
+   */
+  public final transient DefaultOutputPort<T> equalTo = new DefaultOutputPort<T>();
 
-	/**
-	 * Not Equal output port.
-	 */
-	public final transient DefaultOutputPort<T> notEqualTo = new DefaultOutputPort<T>();
+  /**
+   * Not Equal output port.
+   */
+  public final transient DefaultOutputPort<T> notEqualTo = new DefaultOutputPort<T>();
 
-	/**
-	 * Less Than output port.
-	 */
-	public final transient DefaultOutputPort<T> lessThan = new DefaultOutputPort<T>();
+  /**
+   * Less Than output port.
+   */
+  public final transient DefaultOutputPort<T> lessThan = new DefaultOutputPort<T>();
 
-	/**
-	 * Greater than output port.
-	 */
-	public final transient DefaultOutputPort<T> greaterThan = new DefaultOutputPort<T>();
-	public final transient DefaultOutputPort<T> lessThanOrEqualTo = new DefaultOutputPort<T>();
-	public final transient DefaultOutputPort<T> greaterThanOrEqualTo = new DefaultOutputPort<T>();
+  /**
+   * Greater than output port.
+   */
+  public final transient DefaultOutputPort<T> greaterThan = new DefaultOutputPort<T>();
+  public final transient DefaultOutputPort<T> lessThanOrEqualTo = new DefaultOutputPort<T>();
+  public final transient DefaultOutputPort<T> greaterThanOrEqualTo = new DefaultOutputPort<T>();
 
-	/**
-	 * Set constant for comparison.
-	 * 
-	 * @param constant
-	 *          the constant to set
-	 */
-	public void setConstant(T constant)
-	{
-		this.constant = constant;
-	}
+  /**
+   * Set constant for comparison.
+   *
+   * @param constant
+   *          the constant to set
+   */
+  public void setConstant(T constant)
+  {
+    this.constant = constant;
+  }
 
-	/**
-	 * returns the value of constant
-	 */
-	public T getConstant()
-	{
-		return constant;
-	}
+  /**
+   * returns the value of constant
+   */
+  public T getConstant()
+  {
+    return constant;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Margin.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Margin.java b/library/src/main/java/com/datatorrent/lib/math/Margin.java
index 1161ba8..94e15d6 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Margin.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Margin.java
@@ -50,92 +50,92 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
 @OperatorAnnotation(partitionable = false)
 public class Margin<V extends Number> extends BaseNumberValueOperator<V>
 {
-	/**
-	 * Sum of numerator values.
-	 */
-	protected double nval = 0.0;
+  /**
+   * Sum of numerator values.
+   */
+  protected double nval = 0.0;
 
-	/**
-	 * sum of denominator values.
-	 */
-	protected double dval = 0.0;
+  /**
+   * sum of denominator values.
+   */
+  protected double dval = 0.0;
 
-	/**
-	 * Flag to output margin as percentage.
-	 */
-	protected boolean percent = false;
+  /**
+   * Flag to output margin as percentage.
+   */
+  protected boolean percent = false;
 
-	/**
-	 * Numerator input port.
-	 */
-	public final transient DefaultInputPort<V> numerator = new DefaultInputPort<V>()
-	{
-		/**
-		 * Adds to the numerator value
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			nval += tuple.doubleValue();
-		}
-	};
+  /**
+   * Numerator input port.
+   */
+  public final transient DefaultInputPort<V> numerator = new DefaultInputPort<V>()
+  {
+    /**
+     * Adds to the numerator value
+     */
+    @Override
+    public void process(V tuple)
+    {
+      nval += tuple.doubleValue();
+    }
+  };
 
-	/**
-	 * Denominator input port.
-	 */
-	public final transient DefaultInputPort<V> denominator = new DefaultInputPort<V>()
-	{
-		/**
-		 * Adds to the denominator value
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			dval += tuple.doubleValue();
-		}
-	};
+  /**
+   * Denominator input port.
+   */
+  public final transient DefaultInputPort<V> denominator = new DefaultInputPort<V>()
+  {
+    /**
+     * Adds to the denominator value
+     */
+    @Override
+    public void process(V tuple)
+    {
+      dval += tuple.doubleValue();
+    }
+  };
 
-	/**
-	 * Output margin port.
-	 */
-	public final transient DefaultOutputPort<V> margin = new DefaultOutputPort<V>();
+  /**
+   * Output margin port.
+   */
+  public final transient DefaultOutputPort<V> margin = new DefaultOutputPort<V>();
 
-	/**
-	 * getter function for percent
-	 * 
-	 * @return percent
-	 */
-	public boolean getPercent()
-	{
-		return percent;
-	}
+  /**
+   * getter function for percent
+   *
+   * @return percent
+   */
+  public boolean getPercent()
+  {
+    return percent;
+  }
 
-	/**
-	 * setter function for percent
-	 * 
-	 * @param val
-	 *          sets percent
-	 */
-	public void setPercent(boolean val)
-	{
-		percent = val;
-	}
+  /**
+   * setter function for percent
+   *
+   * @param val
+   *          sets percent
+   */
+  public void setPercent(boolean val)
+  {
+    percent = val;
+  }
 
-	/**
-	 * Generates tuple emits it as long as denomitor is not 0 Clears internal data
-	 */
-	@Override
-	public void endWindow()
-	{
-		if (dval == 0) {
-			return;
-		}
-		double val = 1 - (nval / dval);
-		if (percent) {
-			val = val * 100;
-		}
-		margin.emit(getValue(val));
-		nval = 0.0;
-		dval = 0.0;
-	}
+  /**
+   * Generates tuple emits it as long as denomitor is not 0 Clears internal data
+   */
+  @Override
+  public void endWindow()
+  {
+    if (dval == 0) {
+      return;
+    }
+    double val = 1 - (nval / dval);
+    if (percent) {
+      val = val * 100;
+    }
+    margin.emit(getValue(val));
+    nval = 0.0;
+    dval = 0.0;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/MarginKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MarginKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/MarginKeyVal.java
index 29d35bc..e3af508 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MarginKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MarginKeyVal.java
@@ -23,12 +23,11 @@ import java.util.Map;
 
 import org.apache.commons.lang.mutable.MutableDouble;
 
-import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
-import com.datatorrent.lib.util.KeyValPair;
-
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.StreamCodec;
+import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  *
@@ -52,135 +51,134 @@ import com.datatorrent.api.StreamCodec;
  * @tags sum, division, numeric, key value
  * @since 0.3.3
  */
-public class MarginKeyVal<K, V extends Number> extends
-		BaseNumberKeyValueOperator<K, V>
+public class MarginKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K, V>
 {
         /**
-	 * Numerator input port that takes a key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> numerator = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		/**
-		 * Adds tuple to the numerator hash.
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			addTuple(tuple, numerators);
-		}
-
-		/**
-		 * Set StreamCodec used for partitioning.
-		 */
-		@Override
-		public StreamCodec<KeyValPair<K, V>> getStreamCodec()
-		{
-			return getKeyValPairStreamCodec();
-		}
-	};
+   * Numerator input port that takes a key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> numerator = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    /**
+     * Adds tuple to the numerator hash.
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      addTuple(tuple, numerators);
+    }
+
+    /**
+     * Set StreamCodec used for partitioning.
+     */
+    @Override
+    public StreamCodec<KeyValPair<K, V>> getStreamCodec()
+    {
+      return getKeyValPairStreamCodec();
+    }
+  };
 
         /**
-	 * Denominator input port that takes a key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> denominator = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		/**
-		 * Adds tuple to the denominator hash.
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			addTuple(tuple, denominators);
-		}
-
-		/**
-		 * Set StreamCodec used for partitioning.
-		 */
-		@Override
-		public StreamCodec<KeyValPair<K, V>> getStreamCodec()
-		{
-			return getKeyValPairStreamCodec();
-		}
-	};
-
-	/**
-	 * Adds the value for each key.
-	 *
-	 * @param tuple
-	 * @param map
-	 */
-	public void addTuple(KeyValPair<K, V> tuple, Map<K, MutableDouble> map)
-	{
-		K key = tuple.getKey();
-		if (!doprocessKey(key) || (tuple.getValue() == null)) {
-			return;
-		}
-		MutableDouble val = map.get(key);
-		if (val == null) {
-			val = new MutableDouble(0.0);
-			map.put(cloneKey(key), val);
-		}
-		val.add(tuple.getValue().doubleValue());
-	}
+   * Denominator input port that takes a key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> denominator = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    /**
+     * Adds tuple to the denominator hash.
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      addTuple(tuple, denominators);
+    }
+
+    /**
+     * Set StreamCodec used for partitioning.
+     */
+    @Override
+    public StreamCodec<KeyValPair<K, V>> getStreamCodec()
+    {
+      return getKeyValPairStreamCodec();
+    }
+  };
+
+  /**
+   * Adds the value for each key.
+   *
+   * @param tuple
+   * @param map
+   */
+  public void addTuple(KeyValPair<K, V> tuple, Map<K, MutableDouble> map)
+  {
+    K key = tuple.getKey();
+    if (!doprocessKey(key) || (tuple.getValue() == null)) {
+      return;
+    }
+    MutableDouble val = map.get(key);
+    if (val == null) {
+      val = new MutableDouble(0.0);
+      map.put(cloneKey(key), val);
+    }
+    val.add(tuple.getValue().doubleValue());
+  }
 
         /**
-	 * Output margin port that emits Key Value pairs.
-	 */
-	public final transient DefaultOutputPort<KeyValPair<K, V>> margin = new DefaultOutputPort<KeyValPair<K, V>>();
-
-	protected HashMap<K, MutableDouble> numerators = new HashMap<K, MutableDouble>();
-	protected HashMap<K, MutableDouble> denominators = new HashMap<K, MutableDouble>();
-	protected boolean percent = false;
-
-	/**
-	 * getter function for percent
-	 *
-	 * @return percent
-	 */
-	public boolean getPercent()
-	{
-		return percent;
-	}
-
-	/**
-	 * setter function for percent
-	 *
-	 * @param val
-	 *          sets percent
-	 */
-	public void setPercent(boolean val)
-	{
-		percent = val;
-	}
-
-	/**
-	 * Generates tuples for each key and emits them. Only keys that are in the
-	 * denominator are iterated on If the key is only in the numerator, it gets
-	 * ignored (cannot do divide by 0) Clears internal data
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Override
-	public void endWindow()
-	{
-		Double val;
-		for (Map.Entry<K, MutableDouble> e : denominators.entrySet()) {
-			K key = e.getKey();
-			MutableDouble nval = numerators.get(key);
-			if (nval == null) {
-				nval = new MutableDouble(0.0);
-			} else {
-				numerators.remove(key); // so that all left over keys can be reported
-			}
-			if (percent) {
-				val = (1 - nval.doubleValue() / e.getValue().doubleValue()) * 100;
-			} else {
-				val = 1 - nval.doubleValue() / e.getValue().doubleValue();
-			}
-
-			margin.emit(new KeyValPair(key, getValue(val.doubleValue())));
-		}
-
-		numerators.clear();
-		denominators.clear();
-	}
+   * Output margin port that emits Key Value pairs.
+   */
+  public final transient DefaultOutputPort<KeyValPair<K, V>> margin = new DefaultOutputPort<KeyValPair<K, V>>();
+
+  protected HashMap<K, MutableDouble> numerators = new HashMap<K, MutableDouble>();
+  protected HashMap<K, MutableDouble> denominators = new HashMap<K, MutableDouble>();
+  protected boolean percent = false;
+
+  /**
+   * getter function for percent
+   *
+   * @return percent
+   */
+  public boolean getPercent()
+  {
+    return percent;
+  }
+
+  /**
+   * setter function for percent
+   *
+   * @param val
+   *          sets percent
+   */
+  public void setPercent(boolean val)
+  {
+    percent = val;
+  }
+
+  /**
+   * Generates tuples for each key and emits them. Only keys that are in the
+   * denominator are iterated on. If the key is only in the numerator, it gets
+   * ignored (cannot divide by 0). Clears internal data
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Override
+  public void endWindow()
+  {
+    Double val;
+    for (Map.Entry<K, MutableDouble> e : denominators.entrySet()) {
+      K key = e.getKey();
+      MutableDouble nval = numerators.get(key);
+      if (nval == null) {
+        nval = new MutableDouble(0.0);
+      } else {
+        numerators.remove(key); // so that all left over keys can be reported
+      }
+      if (percent) {
+        val = (1 - nval.doubleValue() / e.getValue().doubleValue()) * 100;
+      } else {
+        val = 1 - nval.doubleValue() / e.getValue().doubleValue();
+      }
+
+      margin.emit(new KeyValPair(key, getValue(val.doubleValue())));
+    }
+
+    numerators.clear();
+    denominators.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MarginMap.java b/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
index 2259d85..7ef1f81 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
@@ -18,13 +18,15 @@
  */
 package com.datatorrent.lib.math;
 
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang.mutable.MutableDouble;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
 import com.datatorrent.lib.util.UnifierHashMap;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.commons.lang.mutable.MutableDouble;
 
 
 /**
@@ -144,18 +146,16 @@ public class MarginMap<K, V extends Number> extends BaseNumberKeyValueOperator<K
   {
     HashMap<K, V> tuples = new HashMap<K, V>();
     Double val;
-    for (Map.Entry<K, MutableDouble> e: denominators.entrySet()) {
+    for (Map.Entry<K, MutableDouble> e : denominators.entrySet()) {
       MutableDouble nval = numerators.get(e.getKey());
       if (nval == null) {
         nval = new MutableDouble(0.0);
-      }
-      else {
+      } else {
         numerators.remove(e.getKey()); // so that all left over keys can be reported
       }
       if (percent) {
         val = (1 - nval.doubleValue() / e.getValue().doubleValue()) * 100;
-      }
-      else {
+      } else {
         val = 1 - nval.doubleValue() / e.getValue().doubleValue();
       }
       tuples.put(e.getKey(), getValue(val.doubleValue()));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Max.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Max.java b/library/src/main/java/com/datatorrent/lib/math/Max.java
index e4171f6..8ec8b2f 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Max.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Max.java
@@ -64,8 +64,7 @@ public class Max<V extends Number> extends BaseNumberValueOperator<V> implements
     if (!flag) {
       high = tuple;
       flag = true;
-    }
-    else if (high.doubleValue() < tuple.doubleValue()) {
+    } else if (high.doubleValue() < tuple.doubleValue()) {
       high = tuple;
     }
   }
@@ -74,7 +73,7 @@ public class Max<V extends Number> extends BaseNumberValueOperator<V> implements
    * Max value output port.
    */
   public final transient DefaultOutputPort<V> max = new DefaultOutputPort<V>()
-   {
+  {
     @Override
     public Unifier<V> getUnifier()
     {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/MaxKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MaxKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/MaxKeyVal.java
index 58a947f..95a0a08 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MaxKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MaxKeyVal.java
@@ -21,12 +21,11 @@ package com.datatorrent.lib.math;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
-import com.datatorrent.lib.util.KeyValPair;
-
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.StreamCodec;
+import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  *
@@ -68,8 +67,7 @@ public class MaxKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K
       if (val == null) {
         val = tval;
         highs.put(cloneKey(key), val);
-      }
-      else if (val.doubleValue() < tval.doubleValue()) {
+      } else if (val.doubleValue() < tval.doubleValue()) {
         highs.put(key, tval);
       }
     }
@@ -97,7 +95,7 @@ public class MaxKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K
    * Clears internal data. Node only works in windowed mode.
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	@Override
+  @Override
   public void endWindow()
   {
     if (!highs.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Min.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Min.java b/library/src/main/java/com/datatorrent/lib/math/Min.java
index 244d990..4b3fa23 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Min.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Min.java
@@ -41,17 +41,17 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
  */
 public class Min<V extends Number> extends BaseNumberValueOperator<V> implements Unifier<V>
 {
-	/**
-	 * Computed low value.
-	 */
+  /**
+   * Computed low value.
+   */
   protected V low;
   
   // transient field
   protected boolean flag = false;
   
-	 /**
-          * Input port that takes a number and compares to min and stores the new min.
-          */
+  /**
+   * Input port that takes a number and compares to min and stores the new min.
+   */
   public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
   {
     /**
@@ -73,8 +73,7 @@ public class Min<V extends Number> extends BaseNumberValueOperator<V> implements
     if (!flag) {
       low = tuple;
       flag = true;
-    }
-    else if (low.doubleValue() > tuple.doubleValue()) {
+    } else if (low.doubleValue() > tuple.doubleValue()) {
       low = tuple;
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/MinKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MinKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/MinKeyVal.java
index 5ea710b..2468239 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MinKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MinKeyVal.java
@@ -21,12 +21,11 @@ package com.datatorrent.lib.math;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
-import com.datatorrent.lib.util.KeyValPair;
-
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.StreamCodec;
+import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  *
@@ -48,9 +47,9 @@ import com.datatorrent.api.StreamCodec;
  */
 public class MinKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K, V>
 {
-	/**
-	 * Input port which takes a key vaue pair and updates the value for each key if there is a new min.
-	 */
+  /**
+   * Input port which takes a key value pair and updates the value for each key if there is a new min.
+   */
   public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
   {
     /**
@@ -67,8 +66,7 @@ public class MinKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K
       V val = mins.get(key);
       if (val == null) {
         mins.put(cloneKey(key), tval);
-      }
-      else if (val.doubleValue() > tval.doubleValue()) {
+      } else if (val.doubleValue() > tval.doubleValue()) {
         mins.put(key, tval);
       }
     }
@@ -94,7 +92,7 @@ public class MinKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K
    * Clears internal data. Node only works in windowed mode.
    */
   @SuppressWarnings({ "unchecked", "rawtypes" })
-	@Override
+  @Override
   public void endWindow()
   {
     if (!mins.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java b/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
index c1c70d3..dd56f7f 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Multiplies input tuple (Number) by the value of property "multiplier" and emits the result on respective ports.
@@ -51,9 +51,9 @@ import com.datatorrent.api.annotation.Stateless;
 @Stateless
 public class MultiplyByConstant extends BaseOperator
 {
-	/**
-	 * Input number port.
-	 */
+  /**
+   * Input number port.
+   */
   public final transient DefaultInputPort<Number> input = new DefaultInputPort<Number>()
   {
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Quotient.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Quotient.java b/library/src/main/java/com/datatorrent/lib/math/Quotient.java
index d55f205..ed08e86 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Quotient.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Quotient.java
@@ -47,63 +47,63 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
 @OperatorAnnotation(partitionable = false)
 public class Quotient<V extends Number> extends BaseNumberValueOperator<V>
 {
-	protected double nval = 0.0;
-	protected double dval = 0.0;
-	int mult_by = 1;
+  protected double nval = 0.0;
+  protected double dval = 0.0;
+  int mult_by = 1;
 
-	/**
-	 * Numerator values input port.
-	 */
-	public final transient DefaultInputPort<V> numerator = new DefaultInputPort<V>()
-	{
-		/**
-		 * Adds to the numerator value
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			nval += tuple.doubleValue();
-		}
-	};
+  /**
+   * Numerator values input port.
+   */
+  public final transient DefaultInputPort<V> numerator = new DefaultInputPort<V>()
+  {
+    /**
+     * Adds to the numerator value
+     */
+    @Override
+    public void process(V tuple)
+    {
+      nval += tuple.doubleValue();
+    }
+  };
 
-	/**
-	 * Denominator values input port.
-	 */
-	public final transient DefaultInputPort<V> denominator = new DefaultInputPort<V>()
-	{
-		/**
-		 * Adds to the denominator value
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			dval += tuple.doubleValue();
-		}
-	};
+  /**
+   * Denominator values input port.
+   */
+  public final transient DefaultInputPort<V> denominator = new DefaultInputPort<V>()
+  {
+    /**
+     * Adds to the denominator value
+     */
+    @Override
+    public void process(V tuple)
+    {
+      dval += tuple.doubleValue();
+    }
+  };
 
-	/**
-	 * Quotient output port.
-	 */
-	public final transient DefaultOutputPort<V> quotient = new DefaultOutputPort<V>();
+  /**
+   * Quotient output port.
+   */
+  public final transient DefaultOutputPort<V> quotient = new DefaultOutputPort<V>();
 
-	public void setMult_by(int i)
-	{
-		mult_by = i;
-	}
+  public void setMult_by(int i)
+  {
+    mult_by = i;
+  }
 
-	/**
-	 * Generates tuple emits it as long as denominator is not 0. Clears internal
-	 * data
-	 */
-	@Override
-	public void endWindow()
-	{
-		if (dval == 0) {
-			return;
-		}
-		double val = (nval / dval) * mult_by;
-		quotient.emit(getValue(val));
-		nval = 0.0;
-		dval = 0.0;
-	}
+  /**
+   * Generates tuple emits it as long as denominator is not 0. Clears internal
+   * data
+   */
+  @Override
+  public void endWindow()
+  {
+    if (dval == 0) {
+      return;
+    }
+    double val = (nval / dval) * mult_by;
+    quotient.emit(getValue(val));
+    nval = 0.0;
+    dval = 0.0;
+  }
 }


[16/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/jdbc/JDBCDimensionalOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/jdbc/JDBCDimensionalOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/jdbc/JDBCDimensionalOutputOperator.java
index 353f1b2..bf0d089 100644
--- a/library/src/main/java/com/datatorrent/lib/db/jdbc/JDBCDimensionalOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/jdbc/JDBCDimensionalOutputOperator.java
@@ -18,29 +18,27 @@
  */
 package com.datatorrent.lib.db.jdbc;
 
-
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
-
 import java.util.List;
 import java.util.Map;
 
 import javax.validation.constraints.Min;
 import javax.validation.constraints.NotNull;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.apex.malhar.lib.dimensions.DimensionsDescriptor;
 import org.apache.apex.malhar.lib.dimensions.DimensionsEvent.Aggregate;
 import org.apache.apex.malhar.lib.dimensions.DimensionsEvent.EventKey;
 import org.apache.apex.malhar.lib.dimensions.aggregator.AggregatorRegistry;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 import com.datatorrent.api.Context;
-
 import com.datatorrent.lib.appdata.gpo.GPOMutable;
 import com.datatorrent.lib.appdata.schemas.DimensionalConfigurationSchema;
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStore.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStore.java b/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStore.java
index 835bcdd..89022a3 100644
--- a/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStore.java
+++ b/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStore.java
@@ -60,8 +60,7 @@ public class JdbcNonTransactionalStore extends JdbcTransactionalStore
 
     try {
       connection.setAutoCommit(true);
-    }
-    catch(SQLException e) {
+    } catch (SQLException e) {
       throw new RuntimeException(e);
     }
   }
@@ -71,10 +70,9 @@ public class JdbcNonTransactionalStore extends JdbcTransactionalStore
   {
     Long lastWindowCommit = getCommittedWindowIdHelper(appId, operatorId);
 
-    if(lastWindowCommit == null) {
+    if (lastWindowCommit == null) {
       return -1L;
-    }
-    else {
+    } else {
       return lastWindowCommit;
     }
   }
@@ -87,8 +85,7 @@ public class JdbcNonTransactionalStore extends JdbcTransactionalStore
 
       lastWindowFetchCommand.close();
       lastWindowInsertCommand.close();
-    }
-    catch (SQLException ex) {
+    } catch (SQLException ex) {
       throw new RuntimeException(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/fileaccess/DTFileReader.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/DTFileReader.java b/library/src/main/java/com/datatorrent/lib/fileaccess/DTFileReader.java
index 7fff4e0..ea8f174 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/DTFileReader.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/DTFileReader.java
@@ -19,7 +19,6 @@
 package com.datatorrent.lib.fileaccess;
 
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.TreeMap;
 
 import org.apache.hadoop.classification.InterfaceStability;
@@ -97,7 +96,9 @@ public class DTFileReader implements FileAccess.FileReader
   @Override
   public boolean next(Slice key, Slice value) throws IOException
   {
-    if (scanner.atEnd()) return false;
+    if (scanner.atEnd()) {
+      return false;
+    }
     Entry en = scanner.entry();
 
     key.buffer = en.getBlockBuffer();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/fileaccess/FileAccessFSImpl.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/FileAccessFSImpl.java b/library/src/main/java/com/datatorrent/lib/fileaccess/FileAccessFSImpl.java
index a9cfe00..7184a82 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/FileAccessFSImpl.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/FileAccessFSImpl.java
@@ -61,7 +61,8 @@ public abstract class FileAccessFSImpl implements FileAccess
     this.basePath = path;
   }
 
-  protected Path getFilePath(long bucketKey, String fileName) {
+  protected Path getFilePath(long bucketKey, String fileName)
+  {
     return new Path(getBucketPath(bucketKey), fileName);
   }
 
@@ -71,7 +72,8 @@ public abstract class FileAccessFSImpl implements FileAccess
   }
 
   @Override
-  public long getFileSize(long bucketKey, String fileName) throws IOException {
+  public long getFileSize(long bucketKey, String fileName) throws IOException
+  {
     return fs.getFileStatus(getFilePath(bucketKey, fileName)).getLen();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
index 2a3fd0e..7dfe4e9 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
@@ -143,14 +143,14 @@ public abstract class TFileImpl extends FileAccessFSImpl
   
   /**
    * Return {@link TFile} {@link Reader}
-   *
    */
-  public static class DefaultTFileImpl extends TFileImpl{
+  public static class DefaultTFileImpl extends TFileImpl
+  {
     
     @Override
     public FileReader getReader(long bucketKey, String fileName) throws IOException
     {
-      FSDataInputStream fsdis =  getInputStream(bucketKey, fileName);
+      FSDataInputStream fsdis = getInputStream(bucketKey, fileName);
       long fileLength = getFileSize(bucketKey, fileName);
       super.setupConfig(fs.getConf());
       return new TFileReader(fsdis, fileLength, fs.getConf());
@@ -158,17 +158,16 @@ public abstract class TFileImpl extends FileAccessFSImpl
     
   }
   
-  
   /**
    * Return {@link DTFile} {@link org.apache.hadoop.io.file.tfile.DTFile.Reader}
-   *
    */
-  public static class DTFileImpl extends TFileImpl {
+  public static class DTFileImpl extends TFileImpl
+  {
     
     @Override
     public FileReader getReader(long bucketKey, String fileName) throws IOException
     {
-      FSDataInputStream fsdis =  getInputStream(bucketKey, fileName);
+      FSDataInputStream fsdis = getInputStream(bucketKey, fileName);
       long fileLength = getFileSize(bucketKey, fileName);
       super.setupConfig(fs.getConf());
       return new DTFileReader(fsdis, fileLength, fs.getConf());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/fileaccess/TFileReader.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileReader.java b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileReader.java
index 9ab6f82..0f0c92a 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileReader.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileReader.java
@@ -93,17 +93,20 @@ public class TFileReader implements FileAccess.FileReader
     try {
       return scanner.seekTo(key.buffer, key.offset, key.length);
     } catch (NullPointerException ex) {
-      if (closed)
+      if (closed) {
         throw new IOException("Stream was closed");
-      else
+      } else {
         throw ex;
+      }
     }
   }
 
   @Override
   public boolean next(Slice key, Slice value) throws IOException
   {
-    if (scanner.atEnd()) return false;
+    if (scanner.atEnd()) {
+      return false;
+    }
     Entry en = scanner.entry();
     byte[] rkey = new byte[en.getKeyLength()];
     byte[] rval = new byte[en.getValueLength()];

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
index 6566ae0..7e9d544 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
@@ -37,7 +37,8 @@ public final class TFileWriter implements FileAccess.FileWriter
   
   private FSDataOutputStream fsdos;
   
-  public TFileWriter(FSDataOutputStream stream, int minBlockSize, String compressName, String comparator, Configuration conf) throws IOException
+  public TFileWriter(FSDataOutputStream stream, int minBlockSize, String compressName,
+      String comparator, Configuration conf) throws IOException
   {
     this.fsdos = stream;
     writer = new Writer(stream, minBlockSize, compressName, comparator, conf);
@@ -58,6 +59,9 @@ public final class TFileWriter implements FileAccess.FileWriter
   }
 
   @Override
-  public long getBytesWritten() throws IOException{ return fsdos.getPos(); }
+  public long getBytesWritten() throws IOException
+  {
+    return fsdos.getPos();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java b/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
index 7d0018e..25c0b96 100644
--- a/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
+++ b/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
@@ -40,7 +40,7 @@ import com.datatorrent.lib.converter.Converter;
  * 
  * @displayName Parser
  * @tags parser converter
- * @param <INPUT>
+ * @param <OUTPUT>
  * @since 3.2.0
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/AbstractHttpGetOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/AbstractHttpGetOperator.java b/library/src/main/java/com/datatorrent/lib/io/AbstractHttpGetOperator.java
index fe9a50f..a84e5c7 100644
--- a/library/src/main/java/com/datatorrent/lib/io/AbstractHttpGetOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/AbstractHttpGetOperator.java
@@ -56,8 +56,7 @@ public abstract class AbstractHttpGetOperator<INPUT, OUTPUT> extends AbstractHtt
     if (output.isConnected()) {
       ClientResponse response = wr.get(ClientResponse.class);
       processResponse(response);
-    }
-    else {
+    } else {
       wr.get(ClientResponse.class);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/AbstractHttpInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/AbstractHttpInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/AbstractHttpInputOperator.java
index d7bb503..40ce4bc 100644
--- a/library/src/main/java/com/datatorrent/lib/io/AbstractHttpInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/AbstractHttpInputOperator.java
@@ -28,12 +28,13 @@ import javax.validation.constraints.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
 import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+
 /**
  * This is a base implementation for an HTTP input operator that reads from a given url using the HTTP GET command like an input stream.&nbsp;
  * Subclasses must implement the method which handles the response to the HTTP GET request.
@@ -122,15 +123,13 @@ public abstract class AbstractHttpInputOperator<T> extends SimpleSinglePortInput
 
         ClientResponse response = builder.get(ClientResponse.class);
         processResponse(response);
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         LOG.error("Error reading from " + resource.getURI(), e);
       }
 
       try {
         Thread.sleep(500);
-      }
-      catch (InterruptedException e) {
+      } catch (InterruptedException e) {
         LOG.info("Exiting IO loop {}.", e.toString());
         break;
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/AbstractHttpOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/AbstractHttpOperator.java b/library/src/main/java/com/datatorrent/lib/io/AbstractHttpOperator.java
index d5b8de6..60b123b 100644
--- a/library/src/main/java/com/datatorrent/lib/io/AbstractHttpOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/AbstractHttpOperator.java
@@ -20,14 +20,14 @@ package com.datatorrent.lib.io;
 
 import javax.validation.constraints.NotNull;
 
-import com.sun.jersey.api.client.Client;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.sun.jersey.api.client.Client;
+
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This is the base implementation for HTTP operators.&nbsp;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/AbstractKeyValueStoreOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/AbstractKeyValueStoreOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/AbstractKeyValueStoreOutputOperator.java
index 835f2a6..1e14fe1 100644
--- a/library/src/main/java/com/datatorrent/lib/io/AbstractKeyValueStoreOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/AbstractKeyValueStoreOutputOperator.java
@@ -24,11 +24,11 @@ import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.DAGContext;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.util.KeyValPair;
 
 /**
@@ -62,7 +62,7 @@ public abstract class AbstractKeyValueStoreOutputOperator<K, V> extends BaseOper
    * This input port receives tuples which are maps.
    * Each map may have many key value pairs.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient DefaultInputPort<Map<K, V>> input = new DefaultInputPort<Map<K, V>>()
   {
     @Override
@@ -78,7 +78,7 @@ public abstract class AbstractKeyValueStoreOutputOperator<K, V> extends BaseOper
   /**
    * This input port receives tuples which are individual key value pairs.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient DefaultInputPort<KeyValPair<K, V>> inputInd = new DefaultInputPort<KeyValPair<K, V>>()
   {
     @Override
@@ -141,8 +141,7 @@ public abstract class AbstractKeyValueStoreOutputOperator<K, V> extends BaseOper
         put(getEndWindowKey(), String.valueOf(currentWindowId));
         commitTransaction();
         committedWindowId = currentWindowId;
-      }
-      else {
+      } else {
         LOG.info("Discarding data for window id {} because committed window is {}", currentWindowId, committedWindowId);
       }
     } catch (RuntimeException se) {
@@ -163,7 +162,8 @@ public abstract class AbstractKeyValueStoreOutputOperator<K, V> extends BaseOper
     return "_ew:" + appId + ":" + operatorId;
   }
 
-  private void logException(String message, Exception exception) {
+  private void logException(String message, Exception exception)
+  {
     if (continueOnError != 0) {
       LOG.warn(message, exception);
     } else {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/AbstractSocketInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/AbstractSocketInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/AbstractSocketInputOperator.java
index 4f1e3df..450cdcb 100644
--- a/library/src/main/java/com/datatorrent/lib/io/AbstractSocketInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/AbstractSocketInputOperator.java
@@ -23,7 +23,8 @@ import java.nio.ByteBuffer;
 import java.nio.channels.SelectionKey;
 import java.nio.channels.Selector;
 import java.nio.channels.SocketChannel;
-import java.util.*;
+import java.util.Iterator;
+import java.util.Set;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
 
@@ -159,8 +160,7 @@ public abstract class AbstractSocketInputOperator<T> implements InputOperator, A
       channel.configureBlocking(false);
       channel.connect(new InetSocketAddress(hostname, port));
       channel.register(selector, SelectionKey.OP_CONNECT | SelectionKey.OP_READ);
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
     lock = new ReentrantLock();
@@ -176,8 +176,7 @@ public abstract class AbstractSocketInputOperator<T> implements InputOperator, A
       selector.close();
       scanThread.interrupt();
       scanThread.join();
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -200,11 +199,11 @@ public abstract class AbstractSocketInputOperator<T> implements InputOperator, A
             SelectionKey nextKey = keyIterator.next();
             keyIterator.remove();
             if (nextKey.isConnectable()) {
-              SocketChannel sChannel = (SocketChannel) nextKey.channel();
+              SocketChannel sChannel = (SocketChannel)nextKey.channel();
               sChannel.finishConnect();
             }
             if (nextKey.isReadable()) {
-              SocketChannel sChannel = (SocketChannel) nextKey.channel();
+              SocketChannel sChannel = (SocketChannel)nextKey.channel();
               lock.lock();
               acquiredLock = true;
               sChannel.read(byteBuffer);
@@ -215,8 +214,7 @@ public abstract class AbstractSocketInputOperator<T> implements InputOperator, A
           // Sleep for Scan interval
           Thread.sleep(scanIntervalInMilliSeconds);
         }
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         if (acquiredLock) {
           lock.unlock();
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/ApacheGenRandomLogs.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/ApacheGenRandomLogs.java b/library/src/main/java/com/datatorrent/lib/io/ApacheGenRandomLogs.java
index d1afc60..84bcb1d 100644
--- a/library/src/main/java/com/datatorrent/lib/io/ApacheGenRandomLogs.java
+++ b/library/src/main/java/com/datatorrent/lib/io/ApacheGenRandomLogs.java
@@ -22,10 +22,10 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Random;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
-import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Generates apache server log entries. The apache access log has the following
@@ -43,7 +43,7 @@ import com.datatorrent.api.Context.OperatorContext;
  * %b - The number of bytes in the response
  * %{Referer} - The referer web site reported by the client, "-" if there is none
  * %{User-agent} - Unique string identifying the client browser e.g.,
- * 							"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36"
+ * "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36"
  *
  * Putting it all together a sample log string looks like :
  * --------------------------------------------------------
@@ -59,153 +59,168 @@ import com.datatorrent.api.Context.OperatorContext;
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 public class ApacheGenRandomLogs extends BaseOperator implements InputOperator
 {
-	/**
+  /**
    * This is the output port which emits generated log strings.
    */
-	public final transient DefaultOutputPort<String> outport = new DefaultOutputPort<String>();
-
-	// server name/ip-address  random variable
-	private Random rand = new Random();
-
-	// Apache date format
-	private static SimpleDateFormat apapcheDateFormat = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss Z");
-
-	// http status codes
-	private static String [] httpStatusCodes = {"100", "101", "200", "201", "202", "203", "204", "205", "206", "300", "301",
-																							"301", "302", "303", "304", "305", "306", "307", "400", "401", "402", "403",
-																							"405", "406", "407", "408", "409", "410", "411", "412", "413", "414",
-																							"415", "416", "417", "500", "501", "502", "503", "504", "505"};
-
-	// possible url string formats
-	private static String[] urlFormats = {
-		"mydomain.com/home.php", "mydomain.com/products.php", "mydomain.com/products.php?productid=%d",
-		"mydomain.com/solutions.php", "mydomain.com/solutions.php?solutionid=%d", "mydomain.com/support.php",
-		"mydomain.com/about.php", "mydomain.com/contactus.php", "mydomain.com/services.php",
-		"mydomain.com/services.php?serviceid=%d", "mydomain.com/partners.php", "mydomain.com/partners.php?partnerid=%d"
-	};
-
-	// browser id
-	private static String[] browserIds = {
-		"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:20.0) Gecko/%d Firefox/20.0",
-		"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/%d Firefox/18.0",
-		"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/%d Fedora/1.0.4-4 Firefox/1.0.",
-		"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.10) Gecko/%d CentOS/1.5.0.10-0.1.el4.centos Firefox/1.5.0.10"
-	};
-
-	// generate server name and IP address for server
-	private int genServerId()
-	{
-		return rand.nextInt(10);
-	}
-	private String genServerName(int serverId)
-	{
-		return new StringBuilder("server").append(new Integer(serverId).toString()).append(".mydomain.com:80").toString();
-	}
-	private String genIpAddress(int serverId)
-	{
-		return new StringBuilder().append(rand.nextInt(255))
-									.append(".").append(rand.nextInt(255)).append(".").append(rand.nextInt(255))
-									.append(".").append(rand.nextInt(255)).toString();
-	}
-	private String getTimeStamp()
-	{
-		return new StringBuilder("[").append(apapcheDateFormat.format(new Date())).append("]").toString();
-	}
-	private String genHttpCode()
-	{
-		return httpStatusCodes[rand.nextInt(httpStatusCodes.length)];
-	}
-	private String genUrl()
-	{
-		String format = urlFormats[rand.nextInt(urlFormats.length)];
-		return String.format(format, rand.nextInt(100));
-	}
-	private String genBrowserId()
-	{
-		String format = browserIds[rand.nextInt(browserIds.length)];
-		return String.format(format, rand.nextInt(100000));
-	}
-
-	// generate log string
-	private String genLogString(String ipAddress, String browserId, String httpCode, String url)
-	{
-		// server/ipaddress
-		int serverId = genServerId();
-		String serverName = genServerName(serverId);
-		if (ipAddress == null)
-		{
-		  ipAddress = genIpAddress(serverId);
-		}
-
-		// time
-		String logTime = getTimeStamp();
-
-		// url
-	  if (url == null)
-	  {
-		  url = new StringBuilder("\"").append("GET").append(" ").append(genUrl()).append(" ").append("HTTP/1.1").append("\"").toString();
-	  }
-
-		// http code
-		if (httpCode == null)
-		{
-		  httpCode = genHttpCode();
-		}
-
-		// number of bytes
-		int numBytes = rand.nextInt(4000);
-
-		// browser id
-		if(browserId == null)
-		{
-			browserId = genBrowserId();
-		}
-
-		// print
-		return new StringBuilder().append(serverName).append(" ").append(ipAddress).append(" - - ").append(logTime).append(" ").append(url).append(" ")
-				         .append(httpCode).append(" ").append(numBytes).append(" \" \" \"").append(browserId).append("\"").toString();
-	}
-
-	@Override
-	public void beginWindow(long windowId)
-	{
-		// TODO Auto-generated method stub
-
-	}
-	@Override
-	public void endWindow()
-	{
-		// TODO Auto-generated method stub
-
-	}
-	boolean genTuples;
-	int attackInterval;
-	@Override
-	public void setup(OperatorContext context)
-	{
-		genTuples = true;
-		attackInterval = rand.nextInt(10)+ 1;
-	}
-	@Override
-	public void teardown()
-	{
-		genTuples = false;
-	}
-	@Override
-	public void emitTuples()
-	{
-		attackInterval--;
-		String browserId = null;
-		String ipAdddress = null;
-		if (attackInterval == 0)
-		{
-			browserId = genBrowserId();
-			ipAdddress = genIpAddress(rand.nextInt(10));
-			attackInterval += rand.nextInt(10) + 1;
-			for (int i = 0; i < rand.nextInt(3); i++) outport.emit(genLogString(ipAdddress, browserId, "404", null));
-			String url = new StringBuilder("\"").append("GET").append(" ").append(genUrl()).append(" ").append("HTTP/1.1").append("\"").toString();
-			for (int i = 0; i < rand.nextInt(3); i++) outport.emit(genLogString(ipAdddress, browserId, "404", url));
-		}
-		for (int i = 0; i < rand.nextInt(100000); i++) outport.emit(genLogString(ipAdddress, browserId, null, null));
-	}
+  public final transient DefaultOutputPort<String> outport = new DefaultOutputPort<String>();
+
+  // server name/ip-address  random variable
+  private Random rand = new Random();
+
+  // Apache date format
+  private static SimpleDateFormat apapcheDateFormat = new SimpleDateFormat("dd/MMM/yyyy:HH:mm:ss Z");
+
+  // http status codes
+  private static String[] httpStatusCodes = {"100", "101", "200", "201", "202", "203", "204", "205", "206", "300", "301",
+      "301", "302", "303", "304", "305", "306", "307", "400", "401", "402", "403",
+      "405", "406", "407", "408", "409", "410", "411", "412", "413", "414",
+      "415", "416", "417", "500", "501", "502", "503", "504", "505"};
+
+  // possible url string formats
+  private static String[] urlFormats = {
+    "mydomain.com/home.php", "mydomain.com/products.php", "mydomain.com/products.php?productid=%d",
+    "mydomain.com/solutions.php", "mydomain.com/solutions.php?solutionid=%d", "mydomain.com/support.php",
+    "mydomain.com/about.php", "mydomain.com/contactus.php", "mydomain.com/services.php",
+    "mydomain.com/services.php?serviceid=%d", "mydomain.com/partners.php", "mydomain.com/partners.php?partnerid=%d"
+  };
+
+  // browser id
+  private static String[] browserIds = {
+    "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:20.0) Gecko/%d Firefox/20.0",
+    "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/%d Firefox/18.0",
+    "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/%d Fedora/1.0.4-4 Firefox/1.0.",
+    "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.10) Gecko/%d CentOS/1.5.0.10-0.1.el4.centos Firefox/1.5.0.10"
+  };
+
+  // generate server name and IP address for server
+  private int genServerId()
+  {
+    return rand.nextInt(10);
+  }
+
+  private String genServerName(int serverId)
+  {
+    return new StringBuilder("server").append(new Integer(serverId).toString()).append(".mydomain.com:80").toString();
+  }
+
+  private String genIpAddress(int serverId)
+  {
+    return new StringBuilder().append(rand.nextInt(255))
+        .append(".").append(rand.nextInt(255)).append(".").append(rand.nextInt(255))
+        .append(".").append(rand.nextInt(255)).toString();
+  }
+
+  private String getTimeStamp()
+  {
+    return new StringBuilder("[").append(apapcheDateFormat.format(new Date())).append("]").toString();
+  }
+
+  private String genHttpCode()
+  {
+    return httpStatusCodes[rand.nextInt(httpStatusCodes.length)];
+  }
+
+  private String genUrl()
+  {
+    String format = urlFormats[rand.nextInt(urlFormats.length)];
+    return String.format(format, rand.nextInt(100));
+  }
+
+  private String genBrowserId()
+  {
+    String format = browserIds[rand.nextInt(browserIds.length)];
+    return String.format(format, rand.nextInt(100000));
+  }
+
+  // generate log string
+  private String genLogString(String ipAddress, String browserId, String httpCode, String url)
+  {
+    // server/ipaddress
+    int serverId = genServerId();
+    String serverName = genServerName(serverId);
+    if (ipAddress == null) {
+      ipAddress = genIpAddress(serverId);
+    }
+
+    // time
+    String logTime = getTimeStamp();
+
+    // url
+    if (url == null) {
+      url = new StringBuilder("\"").append("GET").append(" ").append(genUrl()).append(" ").append("HTTP/1.1")
+          .append("\"").toString();
+    }
+
+    // http code
+    if (httpCode == null) {
+      httpCode = genHttpCode();
+    }
+
+    // number of bytes
+    int numBytes = rand.nextInt(4000);
+
+    // browser id
+    if (browserId == null) {
+      browserId = genBrowserId();
+    }
+
+    // print
+    return new StringBuilder().append(serverName).append(" ").append(ipAddress).append(" - - ").append(logTime)
+        .append(" ").append(url).append(" ").append(httpCode).append(" ").append(numBytes).append(" \" \" \"")
+        .append(browserId).append("\"").toString();
+  }
+
+  @Override
+  public void beginWindow(long windowId)
+  {
+    // TODO Auto-generated method stub
+
+  }
+
+  @Override
+  public void endWindow()
+  {
+    // TODO Auto-generated method stub
+
+  }
+
+  boolean genTuples;
+  int attackInterval;
+
+  @Override
+  public void setup(OperatorContext context)
+  {
+    genTuples = true;
+    attackInterval = rand.nextInt(10) + 1;
+  }
+
+  @Override
+  public void teardown()
+  {
+    genTuples = false;
+  }
+
+  @Override
+  public void emitTuples()
+  {
+    attackInterval--;
+    String browserId = null;
+    String ipAdddress = null;
+    if (attackInterval == 0) {
+      browserId = genBrowserId();
+      ipAdddress = genIpAddress(rand.nextInt(10));
+      attackInterval += rand.nextInt(10) + 1;
+      for (int i = 0; i < rand.nextInt(3); i++) {
+        outport.emit(genLogString(ipAdddress, browserId, "404", null));
+      }
+      String url = new StringBuilder("\"").append("GET").append(" ").append(genUrl()).append(" ").append("HTTP/1.1")
+          .append("\"").toString();
+      for (int i = 0; i < rand.nextInt(3); i++) {
+        outport.emit(genLogString(ipAdddress, browserId, "404", url));
+      }
+    }
+    for (int i = 0; i < rand.nextInt(100000); i++) {
+      outport.emit(genLogString(ipAdddress, browserId, null, null));
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/CollectionMultiConsoleOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/CollectionMultiConsoleOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/CollectionMultiConsoleOutputOperator.java
index 1c142a4..36b042f 100644
--- a/library/src/main/java/com/datatorrent/lib/io/CollectionMultiConsoleOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/CollectionMultiConsoleOutputOperator.java
@@ -23,8 +23,8 @@ import java.util.Collection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This output operator receives collections as tuples.&nbsp;
@@ -62,7 +62,8 @@ public class CollectionMultiConsoleOutputOperator<E> extends BaseOperator
   /**
    * This input port which receives collection tuples.
    */
-  public final transient DefaultInputPort<Collection<E>> input = new DefaultInputPort<Collection<E>>() {
+  public final transient DefaultInputPort<Collection<E>> input = new DefaultInputPort<Collection<E>>()
+  {
     @Override
     public void process(Collection<E> t)
     {
@@ -73,8 +74,9 @@ public class CollectionMultiConsoleOutputOperator<E> extends BaseOperator
         if (!silent) {
           System.out.println(obj.toString());
         }
-        if (debug)
+        if (debug) {
           logger.info(obj.toString());
+        }
       }
       System.out.println("}");
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/ConsoleOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/ConsoleOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/ConsoleOutputOperator.java
index 5e72d3d..64046b2 100644
--- a/library/src/main/java/com/datatorrent/lib/io/ConsoleOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/ConsoleOutputOperator.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.io;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.annotation.Stateless;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * Writes tuples to stdout of the container.
  * <p>
@@ -55,8 +55,7 @@ public class ConsoleOutputOperator extends BaseOperator
       String s;
       if (stringFormat == null) {
         s = t.toString();
-      }
-      else {
+      } else {
         s = String.format(stringFormat, t);
       }
       if (!silent) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/HttpJsonChunksInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/HttpJsonChunksInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/HttpJsonChunksInputOperator.java
index ea1fc8d..ad6f7a6 100644
--- a/library/src/main/java/com/datatorrent/lib/io/HttpJsonChunksInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/HttpJsonChunksInputOperator.java
@@ -18,7 +18,6 @@
  */
 package com.datatorrent.lib.io;
 
-import com.sun.jersey.api.client.ClientResponse;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -27,12 +26,16 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.io.IOUtils;
+
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.io.IOUtils;
+
+import com.sun.jersey.api.client.ClientResponse;
+
 /**
  * This operator reads in JSON data and outputs it as a map.
  * <p>
@@ -75,8 +78,7 @@ public class HttpJsonChunksInputOperator extends AbstractHttpInputOperator<Map<S
           response.close();
           break;
         }
-      }
-      catch (JSONException ex) {
+      } catch (JSONException ex) {
         LOG.error("Caught JSON error:", ex);
       }
       if (bytesRead == -1) {
@@ -108,8 +110,7 @@ public class HttpJsonChunksInputOperator extends AbstractHttpInputOperator<Map<S
         currentChunkLength = nextLength;
 
         //LOG.debug("chunk length: " + line);
-      }
-      catch (NumberFormatException e) {
+      } catch (NumberFormatException e) {
         // add to chunk
         chunkStr.append(line);
         chunkStr.append("\n");

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/HttpLinesInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/HttpLinesInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/HttpLinesInputOperator.java
index a483df1..c355b16 100644
--- a/library/src/main/java/com/datatorrent/lib/io/HttpLinesInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/HttpLinesInputOperator.java
@@ -18,12 +18,13 @@
  */
 package com.datatorrent.lib.io;
 
-import com.sun.jersey.api.client.ClientResponse;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 
+import com.sun.jersey.api.client.ClientResponse;
+
 /**
  * Incoming data is interpreted as lines of plain text and each tuple output is a line in the content.
  * <p></p>
@@ -47,8 +48,7 @@ public class HttpLinesInputOperator extends AbstractHttpInputOperator<String>
         rawOutput.emit(line);
         outputPort.emit(line);
       }
-    }
-    finally {
+    } finally {
       br.close();
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/HttpPostOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/HttpPostOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/HttpPostOutputOperator.java
index a69761c..57a4d91 100644
--- a/library/src/main/java/com/datatorrent/lib/io/HttpPostOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/HttpPostOutputOperator.java
@@ -22,10 +22,10 @@ import java.util.Map;
 
 import javax.ws.rs.core.MediaType;
 
-import com.sun.jersey.api.client.WebResource;
-
 import org.codehaus.jettison.json.JSONObject;
 
+import com.sun.jersey.api.client.WebResource;
+
 import com.datatorrent.api.Context.OperatorContext;
 
 /**
@@ -51,8 +51,7 @@ public class HttpPostOutputOperator<T> extends AbstractHttpOperator<T>
   {
     if (t instanceof Map) {
       resource.type(MediaType.APPLICATION_JSON).post(new JSONObject((Map<?, ?>)t).toString());
-    }
-    else {
+    } else {
       resource.post(t.toString());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/IdempotentStorageManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/IdempotentStorageManager.java b/library/src/main/java/com/datatorrent/lib/io/IdempotentStorageManager.java
index 65bda89..11b68a7 100644
--- a/library/src/main/java/com/datatorrent/lib/io/IdempotentStorageManager.java
+++ b/library/src/main/java/com/datatorrent/lib/io/IdempotentStorageManager.java
@@ -162,7 +162,7 @@ public interface IdempotentStorageManager extends StorageAgent, Component<Contex
 
             for (FileStatus status : fs.listStatus(operatorDirStatus.getPath())) {
               String fileName = status.getPath().getName();
-              if(fileName.endsWith(FSStorageAgent.TMP_FILE)) {
+              if (fileName.endsWith(FSStorageAgent.TMP_FILE)) {
                 continue;
               }
               long windowId = Long.parseLong(fileName, 16);
@@ -173,8 +173,7 @@ public interface IdempotentStorageManager extends StorageAgent, Component<Contex
             }
           }
         }
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException(e);
       }
     }
@@ -255,8 +254,7 @@ public interface IdempotentStorageManager extends StorageAgent, Component<Contex
                 deletedOperators.remove(loperator);
                 fs.delete(loperatorPath, true);
               }
-            }
-            else if (loperator == operatorId) {
+            } else if (loperator == operatorId) {
               storageAgent.delete(loperator, lwindow);
             }
           }
@@ -296,7 +294,7 @@ public interface IdempotentStorageManager extends StorageAgent, Component<Contex
 
       for (IdempotentStorageManager storageManager : newManagers) {
 
-        FSIdempotentStorageManager lmanager = (FSIdempotentStorageManager) storageManager;
+        FSIdempotentStorageManager lmanager = (FSIdempotentStorageManager)storageManager;
         lmanager.recoveryPath = this.recoveryPath;
         lmanager.storageAgent = this.storageAgent;
 
@@ -318,7 +316,7 @@ public interface IdempotentStorageManager extends StorageAgent, Component<Contex
         //If some operators were removed then there needs to be a manager which can clean there state when it is not needed.
         if (deletedOperatorsManager == null) {
           //None of the managers were handling deleted operators data.
-          deletedOperatorsManager = (FSIdempotentStorageManager) newManagers.iterator().next();
+          deletedOperatorsManager = (FSIdempotentStorageManager)newManagers.iterator().next();
           deletedOperatorsManager.deletedOperators = Sets.newHashSet();
         }
 
@@ -331,8 +329,7 @@ public interface IdempotentStorageManager extends StorageAgent, Component<Contex
     {
       try {
         fs.close();
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException(e);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/MapMultiConsoleOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/MapMultiConsoleOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/MapMultiConsoleOutputOperator.java
index c461163..1e31552 100644
--- a/library/src/main/java/com/datatorrent/lib/io/MapMultiConsoleOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/MapMultiConsoleOutputOperator.java
@@ -23,8 +23,8 @@ import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator writes tuples which are maps to standard out of the container.
@@ -57,7 +57,8 @@ public class MapMultiConsoleOutputOperator<K, V> extends BaseOperator
   }
 
   private static final Logger logger = LoggerFactory.getLogger(MapMultiConsoleOutputOperator.class);
-  public final transient DefaultInputPort<Map<K, V>> input = new DefaultInputPort<Map<K, V>>() {
+  public final transient DefaultInputPort<Map<K, V>> input = new DefaultInputPort<Map<K, V>>()
+  {
     @Override
     public void process(Map<K, V> t)
     {
@@ -66,8 +67,9 @@ public class MapMultiConsoleOutputOperator<K, V> extends BaseOperator
         if (!silent) {
           System.out.println(entry.getKey().toString() + "=" + entry.getValue().toString());
         }
-        if (debug)
+        if (debug) {
           logger.info(entry.getKey().toString() + "=" + entry.getValue().toString());
+        }
       }
       System.out.println("}");
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQuery.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQuery.java b/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQuery.java
index 3e8d8d3..7cf883f 100644
--- a/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQuery.java
+++ b/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQuery.java
@@ -161,23 +161,18 @@ public class PubSubWebSocketAppDataQuery extends PubSubWebSocketInputOperator<St
       JSONArray ja = jo.names();
 
       //Make sure that only the correct keys are in the first level of JSON
-      for(int keyIndex = 0;
-          keyIndex < ja.length();
-          keyIndex++) {
+      for (int keyIndex = 0; keyIndex < ja.length(); keyIndex++) {
         String key = ja.getString(keyIndex);
-        if(!(PubSubMessage.DATA_KEY.equals(key) ||
-           PubSubMessage.TOPIC_KEY.equals(key) ||
-           PubSubMessage.TYPE_KEY.equals(key))) {
-          logger.error("{} is not a valid key in the first level of the following pubsub message:\n{}",
-                       key,
-                       message);
+        if (!(PubSubMessage.DATA_KEY.equals(key) ||
+            PubSubMessage.TOPIC_KEY.equals(key) ||
+            PubSubMessage.TYPE_KEY.equals(key))) {
+          logger.error("{} is not a valid key in the first level of the following pubsub message:\n{}", key, message);
           return null;
         }
       }
 
       data = jo.getString(PubSubMessage.DATA_KEY);
-    }
-    catch(JSONException e) {
+    } catch (JSONException e) {
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataResult.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataResult.java b/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataResult.java
index 4d25f49..e1f3fa1 100644
--- a/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataResult.java
+++ b/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketAppDataResult.java
@@ -19,6 +19,7 @@
 package com.datatorrent.lib.io;
 
 import java.io.IOException;
+import java.net.URI;
 
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
@@ -26,10 +27,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.datatorrent.api.Context.OperatorContext;
-
 import com.datatorrent.common.experimental.AppData;
 import com.datatorrent.common.util.PubSubMessage.PubSubMessageType;
-import java.net.URI;
 
 /**
  * This is an app data pub sub result operator. This operator is used to send results to
@@ -40,8 +39,9 @@ import java.net.URI;
  * @tags output, app data, result
  * @since 3.0.0
  */
-@AppData.AppendQueryIdToTopic(value=true)
-public class PubSubWebSocketAppDataResult extends PubSubWebSocketOutputOperator<String> implements AppData.ConnectionInfoProvider
+@AppData.AppendQueryIdToTopic(value = true)
+public class PubSubWebSocketAppDataResult extends PubSubWebSocketOutputOperator<String>
+    implements AppData.ConnectionInfoProvider
 {
   private static final Logger logger = LoggerFactory.getLogger(PubSubWebSocketAppDataResult.class);
 
@@ -93,8 +93,7 @@ public class PubSubWebSocketAppDataResult extends PubSubWebSocketOutputOperator<
 
     try {
       jo = new JSONObject(t);
-    }
-    catch(JSONException ex) {
+    } catch (JSONException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -102,8 +101,7 @@ public class PubSubWebSocketAppDataResult extends PubSubWebSocketOutputOperator<
 
     try {
       id = jo.getString("id");
-    }
-    catch(JSONException ex) {
+    } catch (JSONException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -115,8 +113,7 @@ public class PubSubWebSocketAppDataResult extends PubSubWebSocketOutputOperator<
       output.put("topic", topic);
       output.put("data", jo);
       output.put("type", PubSubMessageType.PUBLISH.getIdentifier());
-    }
-    catch(JSONException ex) {
+    } catch (JSONException ex) {
       throw new RuntimeException(ex);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketInputOperator.java
index 0b56924..4d5fa9a 100644
--- a/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/PubSubWebSocketInputOperator.java
@@ -82,8 +82,7 @@ public class PubSubWebSocketInputOperator<T> extends WebSocketInputOperator<T>
     super.run();
     try {
       connection.sendMessage(PubSubMessageCodec.constructSubscribeMessage(topic, codec));
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       LOG.error("Exception caught", ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/SimpleSinglePortInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/SimpleSinglePortInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/SimpleSinglePortInputOperator.java
index 5bf31ff..12737ba 100644
--- a/library/src/main/java/com/datatorrent/lib/io/SimpleSinglePortInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/SimpleSinglePortInputOperator.java
@@ -21,10 +21,13 @@ package com.datatorrent.lib.io;
 import java.util.Iterator;
 import java.util.concurrent.ArrayBlockingQueue;
 
-import com.datatorrent.api.*;
+import org.apache.commons.lang3.ClassUtils;
+
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.api.Operator;
 import com.datatorrent.common.util.BaseOperator;
-import org.apache.commons.lang3.ClassUtils;
 
 /**
  * This an input operator which passes data from an asynchronous data source to a port processing thread.
@@ -48,7 +51,7 @@ public abstract class SimpleSinglePortInputOperator<T> extends BaseOperator impl
    * The single output port of this input operator.
    * Collects asynchronously emitted tuples and flushes in container thread.
    */
-  final public transient BufferingOutputPort<T> outputPort;
+  public final transient BufferingOutputPort<T> outputPort;
 
   public SimpleSinglePortInputOperator(int portCapacity)
   {
@@ -61,7 +64,7 @@ public abstract class SimpleSinglePortInputOperator<T> extends BaseOperator impl
   }
 
   @Override
-  final public void activate(OperatorContext ctx)
+  public final void activate(OperatorContext ctx)
   {
     isActive = true;
     if (this instanceof Runnable) {
@@ -71,7 +74,7 @@ public abstract class SimpleSinglePortInputOperator<T> extends BaseOperator impl
   }
 
   @Override
-  final public void deactivate()
+  public final void deactivate()
   {
     isActive = false;
     if (ioThread != null) {
@@ -80,7 +83,7 @@ public abstract class SimpleSinglePortInputOperator<T> extends BaseOperator impl
     }
   }
 
-  final public boolean isActive()
+  public final boolean isActive()
   {
     return isActive;
   }
@@ -115,8 +118,7 @@ public abstract class SimpleSinglePortInputOperator<T> extends BaseOperator impl
     {
       try {
         tuples.put(tuple);
-      }
-      catch (InterruptedException ex) {
+      } catch (InterruptedException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -130,6 +132,6 @@ public abstract class SimpleSinglePortInputOperator<T> extends BaseOperator impl
       }
     }
 
-  };
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/SmtpOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/SmtpOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/SmtpOutputOperator.java
index 6477021..2d6ef86 100644
--- a/library/src/main/java/com/datatorrent/lib/io/SmtpOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/SmtpOutputOperator.java
@@ -18,24 +18,32 @@
  */
 package com.datatorrent.lib.io;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
-import java.util.*;
-
-import javax.mail.*;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.mail.Authenticator;
+import javax.mail.Message;
+import javax.mail.MessagingException;
+import javax.mail.PasswordAuthentication;
+import javax.mail.Session;
+import javax.mail.Transport;
 import javax.mail.internet.InternetAddress;
 import javax.mail.internet.MimeMessage;
 import javax.validation.constraints.AssertTrue;
 import javax.validation.constraints.NotNull;
 
-import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.StringUtils;
+
 import com.google.common.collect.Maps;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * This operator outputs data to an smtp server.
  * <p></p>
@@ -90,8 +98,7 @@ public class SmtpOutputOperator extends BaseOperator
         message.setContent(mailContent, contentType);
         LOG.info("Sending email for tuple {}", t.toString());
         Transport.send(message);
-      }
-      catch (MessagingException ex) {
+      } catch (MessagingException ex) {
         LOG.error("Something wrong with sending email.", ex);
       }
     }
@@ -264,8 +271,7 @@ public class SmtpOutputOperator extends BaseOperator
       }
       message.setSubject(subject);
       LOG.debug("all recipients {}", Arrays.toString(message.getAllRecipients()));
-    }
-    catch (MessagingException ex) {
+    } catch (MessagingException ex) {
       throw new RuntimeException(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/WebSocketInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/WebSocketInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/WebSocketInputOperator.java
index f805dcf..eae9e12 100644
--- a/library/src/main/java/com/datatorrent/lib/io/WebSocketInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/WebSocketInputOperator.java
@@ -29,13 +29,12 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.commons.lang3.ClassUtils;
-
 import org.apache.apex.shaded.ning19.com.ning.http.client.AsyncHttpClient;
 import org.apache.apex.shaded.ning19.com.ning.http.client.AsyncHttpClientConfigBean;
 import org.apache.apex.shaded.ning19.com.ning.http.client.ws.WebSocket;
 import org.apache.apex.shaded.ning19.com.ning.http.client.ws.WebSocketTextListener;
 import org.apache.apex.shaded.ning19.com.ning.http.client.ws.WebSocketUpgradeHandler;
+import org.apache.commons.lang3.ClassUtils;
 
 import com.datatorrent.api.Context.OperatorContext;
 
@@ -60,8 +59,8 @@ public class WebSocketInputOperator<T> extends SimpleSinglePortInputOperator<T>
   //Do not make this @NotNull since null is a valid value for some child classes
   private URI uri;
   private transient AsyncHttpClient client;
-  private transient final JsonFactory jsonFactory = new JsonFactory();
-  protected transient final ObjectMapper mapper = new ObjectMapper(jsonFactory);
+  private final transient JsonFactory jsonFactory = new JsonFactory();
+  protected final transient ObjectMapper mapper = new ObjectMapper(jsonFactory);
   protected transient WebSocket connection;
   private transient boolean connectionClosed = false;
   private transient volatile boolean shutdown = false;
@@ -121,8 +120,7 @@ public class WebSocketInputOperator<T> extends SimpleSinglePortInputOperator<T>
       shutdown = false;
       monThread = new MonitorThread();
       monThread.start();
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -135,8 +133,7 @@ public class WebSocketInputOperator<T> extends SimpleSinglePortInputOperator<T>
       if (monThread != null) {
         monThread.join();
       }
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       LOG.error("Error joining monitor", ex);
     }
 
@@ -171,8 +168,8 @@ public class WebSocketInputOperator<T> extends SimpleSinglePortInputOperator<T>
             connection.close();
             WebSocketInputOperator.this.activate(null);
           }
-        }
-        catch (Exception ex) {
+        } catch (Exception ex) {
+          //swallowing exception
         }
       }
     }
@@ -213,11 +210,10 @@ public class WebSocketInputOperator<T> extends SimpleSinglePortInputOperator<T>
           LOG.debug("Got: " + string);
           try {
             T o = convertMessage(string);
-            if(!(skipNull && o == null)) {
+            if (!(skipNull && o == null)) {
               outputPort.emit(o);
             }
-          }
-          catch (IOException ex) {
+          } catch (IOException ex) {
             LOG.error("Got exception: ", ex);
           }
         }
@@ -242,8 +238,7 @@ public class WebSocketInputOperator<T> extends SimpleSinglePortInputOperator<T>
         }
 
       }).build()).get(5, TimeUnit.SECONDS);
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       LOG.error("Error reading from " + uri, ex);
       if (client != null) {
         client.close();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/WebSocketOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/WebSocketOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/WebSocketOutputOperator.java
index b793183..382be67 100644
--- a/library/src/main/java/com/datatorrent/lib/io/WebSocketOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/WebSocketOutputOperator.java
@@ -38,9 +38,9 @@ import org.apache.apex.shaded.ning19.com.ning.http.client.ws.WebSocketTextListen
 import org.apache.apex.shaded.ning19.com.ning.http.client.ws.WebSocketUpgradeHandler;
 import org.apache.commons.lang3.ClassUtils;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Reads via WebSocket from given URL as input stream.&nbsp;Incoming data is interpreted as JSONObject and converted to {@link java.util.Map}.
@@ -59,8 +59,8 @@ public class WebSocketOutputOperator<T> extends BaseOperator
   //Do not make this @NotNull since null is a valid value for some child classes
   private URI uri;
   private transient AsyncHttpClient client;
-  private transient final JsonFactory jsonFactory = new JsonFactory();
-  protected transient final ObjectMapper mapper = new ObjectMapper(jsonFactory);
+  private final transient JsonFactory jsonFactory = new JsonFactory();
+  protected final transient ObjectMapper mapper = new ObjectMapper(jsonFactory);
   protected transient WebSocket connection;
   private int ioThreadMultiplier = 1;
   private int numRetries = 3;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/WebSocketServerInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/WebSocketServerInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/WebSocketServerInputOperator.java
index bf1b40e..2814b49 100644
--- a/library/src/main/java/com/datatorrent/lib/io/WebSocketServerInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/WebSocketServerInputOperator.java
@@ -18,18 +18,20 @@
  */
 package com.datatorrent.lib.io;
 
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.netlet.util.DTThrowable;
 import javax.servlet.http.HttpServletRequest;
 import javax.validation.constraints.Min;
 import javax.validation.constraints.NotNull;
+
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.websocket.WebSocket;
 import org.eclipse.jetty.websocket.WebSocketServlet;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.netlet.util.DTThrowable;
+
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 /**
  * @since 3.3.0
@@ -75,8 +77,7 @@ public abstract class WebSocketServerInputOperator implements InputOperator
 
     try {
       server.start();
-    }
-    catch(Exception ex) {
+    } catch (Exception ex) {
       DTThrowable.rethrow(ex);
     }
   }
@@ -86,8 +87,7 @@ public abstract class WebSocketServerInputOperator implements InputOperator
   {
     try {
       server.stop();
-    }
-    catch(Exception ex) {
+    } catch (Exception ex) {
       DTThrowable.rethrow(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/WidgetOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/WidgetOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/WidgetOutputOperator.java
index 5b6259c..b027b58 100644
--- a/library/src/main/java/com/datatorrent/lib/io/WidgetOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/WidgetOutputOperator.java
@@ -25,17 +25,16 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import com.google.common.collect.Maps;
-
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.tuple.MutablePair;
 import org.apache.commons.lang3.tuple.Pair;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
-
 import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.common.util.PubSubMessageCodec;
 
@@ -60,16 +59,15 @@ import com.datatorrent.common.util.PubSubMessageCodec;
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 public class WidgetOutputOperator extends BaseOperator
 {
-  protected transient WebSocketOutputOperator<Pair<String, Object>> wsoo = new WebSocketOutputOperator<Pair<String,Object>>(){
-
+  protected transient WebSocketOutputOperator<Pair<String, Object>> wsoo = new WebSocketOutputOperator<Pair<String, Object>>()
+  {
     private transient PubSubMessageCodec<Object> codec = new PubSubMessageCodec<>(mapper);
 
     @Override
-    public String convertMapToMessage(Pair<String,Object> t) throws IOException
+    public String convertMapToMessage(Pair<String, Object> t) throws IOException
     {
       return PubSubMessageCodec.constructPublishMessage(t.getLeft(), t.getRight(), codec);
     }
-
   };
 
   protected transient ConsoleOutputOperator coo = new ConsoleOutputOperator();
@@ -99,31 +97,31 @@ public class WidgetOutputOperator extends BaseOperator
   /**
    * Tuples received on this input port will be sent to a Simple Widget for display.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient SimpleInputPort simpleInput = new SimpleInputPort(this);
 
   /**
    * Tuples received on this input port will be sent to a Time Series Widget for display.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient TimeseriesInputPort timeSeriesInput = new TimeseriesInputPort(this);
 
   /**
    * Tuples received on this input port will be sent to a Percentage Widget.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient PercentageInputPort percentageInput = new PercentageInputPort(this);
 
   /**
    * Tuples received on this input port will be sent to a Top N Widget for display.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient TopNInputPort topNInput = new TopNInputPort(this);
 
   /**
    * Tuples received on this input port will be sent to a Pie Chart Widget for display.
    */
-  @InputPortFieldAnnotation(optional=true)
+  @InputPortFieldAnnotation(optional = true)
   public final transient PiechartInputPort pieChartInput = new PiechartInputPort(this);
 
   protected transient boolean isWebSocketConnected = true;
@@ -132,7 +130,7 @@ public class WidgetOutputOperator extends BaseOperator
   public void setup(OperatorContext context)
   {
     String gatewayAddress = context.getValue(DAG.GATEWAY_CONNECT_ADDRESS);
-    if(!StringUtils.isEmpty(gatewayAddress)){
+    if (!StringUtils.isEmpty(gatewayAddress)) {
       wsoo.setUri(URI.create("ws://" + gatewayAddress + "/pubsub"));
       wsoo.setup(context);
     } else {
@@ -205,8 +203,8 @@ public class WidgetOutputOperator extends BaseOperator
 
   }
 
-  public static class TopNInputPort extends DefaultInputPort<HashMap<String, Number>>{
-
+  public static class TopNInputPort extends DefaultInputPort<HashMap<String, Number>>
+  {
     private final WidgetOutputOperator operator;
 
     public TopNInputPort(WidgetOutputOperator oper)

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileInputOperator.java
index b5b20e4..bf1605d 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileInputOperator.java
@@ -96,8 +96,8 @@ import com.datatorrent.lib.util.KryoCloneUtils;
  * @param <T> The type of the object that this input operator reads.
  * @since 1.0.2
  */
-public abstract class AbstractFileInputOperator<T> implements InputOperator, Partitioner<AbstractFileInputOperator<T>>, StatsListener,
-  Operator.CheckpointListener
+public abstract class AbstractFileInputOperator<T>
+    implements InputOperator, Partitioner<AbstractFileInputOperator<T>>, StatsListener, Operator.CheckpointListener
 {
   private static final Logger LOG = LoggerFactory.getLogger(AbstractFileInputOperator.class);
 
@@ -110,11 +110,11 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
   protected String currentFile;
   protected Set<String> processedFiles = new HashSet<String>();
   protected int emitBatchSize = 1000;
-  protected int currentPartitions = 1 ;
+  protected int currentPartitions = 1;
   protected int partitionCount = 1;
   private int retryCount = 0;
   private int maxRetryCount = 5;
-  transient protected int skipCount = 0;
+  protected transient int skipCount = 0;
   private transient OperatorContext context;
 
   private final BasicCounters<MutableLong> fileCounters = new BasicCounters<MutableLong>(MutableLong.class);
@@ -140,7 +140,8 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
    * failed file is retried for maxRetryCount number of times, after that the file is
    * ignored.
    */
-  protected static class FailedFile {
+  protected static class FailedFile
+  {
     String path;
     int   offset;
     int    retryCount;
@@ -150,13 +151,15 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     @SuppressWarnings("unused")
     protected FailedFile() {}
 
-    protected FailedFile(String path, int offset) {
+    protected FailedFile(String path, int offset)
+    {
       this.path = path;
       this.offset = offset;
       this.retryCount = 0;
     }
 
-    protected FailedFile(String path, int offset, int retryCount) {
+    protected FailedFile(String path, int offset, int retryCount)
+    {
       this.path = path;
       this.offset = offset;
       this.retryCount = retryCount;
@@ -266,7 +269,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
    * <p/>
    * @since 1.0.4
    */
-  public final static class FileCountersAggregator implements CountersAggregator, Serializable
+  public static final class FileCountersAggregator implements CountersAggregator, Serializable
   {
     private static final long serialVersionUID = 201409041428L;
     MutableLong totalLocalProcessedFiles = new MutableLong();
@@ -278,11 +281,11 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     @SuppressWarnings("unchecked")
     public Object aggregate(Collection<?> countersList)
     {
-      if(countersList.isEmpty()) {
+      if (countersList.isEmpty()) {
         return null;
       }
 
-      BasicCounters<MutableLong> tempFileCounters = (BasicCounters<MutableLong>) countersList.iterator().next();
+      BasicCounters<MutableLong> tempFileCounters = (BasicCounters<MutableLong>)countersList.iterator().next();
       MutableLong globalProcessedFiles = tempFileCounters.getCounter(FileCounters.GLOBAL_PROCESSED_FILES);
       MutableLong globalNumberOfFailures = tempFileCounters.getCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES);
       MutableLong globalNumberOfRetries = tempFileCounters.getCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES);
@@ -291,8 +294,8 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
       totalLocalNumberOfFailures.setValue(0);
       totalLocalNumberOfRetries.setValue(0);
 
-      for(Object fileCounters: countersList) {
-        BasicCounters<MutableLong> basicFileCounters = (BasicCounters<MutableLong>) fileCounters;
+      for (Object fileCounters : countersList) {
+        BasicCounters<MutableLong> basicFileCounters = (BasicCounters<MutableLong>)fileCounters;
         totalLocalProcessedFiles.add(basicFileCounters.getCounter(FileCounters.LOCAL_PROCESSED_FILES));
         pendingFiles.add(basicFileCounters.getCounter(FileCounters.PENDING_FILES));
         totalLocalNumberOfFailures.add(basicFileCounters.getCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES));
@@ -441,25 +444,17 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
       filePath = new Path(directory);
       configuration = new Configuration();
       fs = getFSInstance();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       failureHandling(ex);
     }
 
-    fileCounters.setCounter(FileCounters.GLOBAL_PROCESSED_FILES,
-                            globalProcessedFileCount);
-    fileCounters.setCounter(FileCounters.LOCAL_PROCESSED_FILES,
-                            localProcessedFileCount);
-    fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES,
-                            globalNumberOfFailures);
-    fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES,
-                            localNumberOfFailures);
-    fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES,
-                            globalNumberOfRetries);
-    fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_RETRIES,
-                            localNumberOfRetries);
-    fileCounters.setCounter(FileCounters.PENDING_FILES,
-                            pendingFileCount);
+    fileCounters.setCounter(FileCounters.GLOBAL_PROCESSED_FILES, globalProcessedFileCount);
+    fileCounters.setCounter(FileCounters.LOCAL_PROCESSED_FILES, localProcessedFileCount);
+    fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_FAILURES, globalNumberOfFailures);
+    fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_FAILURES, localNumberOfFailures);
+    fileCounters.setCounter(FileCounters.GLOBAL_NUMBER_OF_RETRIES, globalNumberOfRetries);
+    fileCounters.setCounter(FileCounters.LOCAL_NUMBER_OF_RETRIES, localNumberOfRetries);
+    fileCounters.setCounter(FileCounters.PENDING_FILES, pendingFileCount);
 
     idempotentStorageManager.setup(context);
     if (context.getValue(OperatorContext.ACTIVATION_WINDOW_ID) < idempotentStorageManager.getLargestRecoveryWindow()) {
@@ -487,11 +482,10 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     boolean fileFailed = false;
 
     try {
-      if(inputStream != null) {
+      if (inputStream != null) {
         inputStream.close();
       }
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       savedException = ex;
       fileFailed = true;
     }
@@ -500,20 +494,19 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
 
     try {
       fs.close();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       savedException = ex;
       fsFailed = true;
     }
 
-    if(savedException != null) {
+    if (savedException != null) {
       String errorMessage = "";
 
-      if(fileFailed) {
+      if (fileFailed) {
         errorMessage += "Failed to close " + currentFile + ". ";
       }
 
-      if(fsFailed) {
+      if (fsFailed) {
         errorMessage += "Failed to close filesystem.";
       }
 
@@ -537,18 +530,15 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     if (currentWindowId > idempotentStorageManager.getLargestRecoveryWindow()) {
       try {
         idempotentStorageManager.save(currentWindowRecoveryState, operatorId, currentWindowId);
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException("saving recovery", e);
       }
     }
     currentWindowRecoveryState.clear();
-    if(context != null) {
-      pendingFileCount.setValue(pendingFiles.size() +
-                                     failedFiles.size() +
-                                     unfinishedFiles.size());
+    if (context != null) {
+      pendingFileCount.setValue(pendingFiles.size() + failedFiles.size() + unfinishedFiles.size());
 
-      if(currentFile != null) {
+      if (currentFile != null) {
         pendingFileCount.increment();
       }
 
@@ -567,7 +557,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
 
       for (Object recovery : recoveryDataPerOperator.values()) {
         @SuppressWarnings("unchecked")
-        LinkedList<RecoveryEntry> recoveryData = (LinkedList<RecoveryEntry>) recovery;
+        LinkedList<RecoveryEntry> recoveryData = (LinkedList<RecoveryEntry>)recovery;
 
         for (RecoveryEntry recoveryEntry : recoveryData) {
           if (scanner.acceptFile(recoveryEntry.file)) {
@@ -606,8 +596,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
                 offset++;
                 emit(line);
               }
-            }
-            else {
+            } else {
               while (offset < recoveryEntry.endOffset) {
                 T line = readEntity();
                 offset++;
@@ -617,8 +606,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
           }
         }
       }
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       throw new RuntimeException("replay", e);
     }
   }
@@ -645,24 +633,20 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
             offset = 0;
             skipCount = 0;
           }
-        }
-        else if (!unfinishedFiles.isEmpty()) {
+        } else if (!unfinishedFiles.isEmpty()) {
           retryFailedFile(unfinishedFiles.poll());
-        }
-        else if (!pendingFiles.isEmpty()) {
+        } else if (!pendingFiles.isEmpty()) {
           String newPathString = pendingFiles.iterator().next();
           pendingFiles.remove(newPathString);
-          if (fs.exists(new Path(newPathString)))
+          if (fs.exists(new Path(newPathString))) {
             this.inputStream = openFile(new Path(newPathString));
-        }
-        else if (!failedFiles.isEmpty()) {
+          }
+        } else if (!failedFiles.isEmpty()) {
           retryFailedFile(failedFiles.poll());
-        }
-        else {
+        } else {
           scanDirectory();
         }
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         failureHandling(ex);
       }
     }
@@ -687,13 +671,11 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
           if (skipCount == 0) {
             offset++;
             emit(line);
-          }
-          else {
+          } else {
             skipCount--;
           }
         }
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         failureHandling(e);
       }
       //Only when something was emitted from the file then we record it for entry.
@@ -708,10 +690,10 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
    */
   protected void scanDirectory()
   {
-    if(System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis) {
+    if (System.currentTimeMillis() - scanIntervalMillis >= lastScanMillis) {
       Set<Path> newPaths = scanner.scan(fs, filePath, processedFiles);
 
-      for(Path newPath : newPaths) {
+      for (Path newPath : newPaths) {
         String newPathString = newPath.toString();
         pendingFiles.add(newPathString);
         processedFiles.add(newPathString);
@@ -729,27 +711,28 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
   private void failureHandling(Exception e)
   {
     localNumberOfFailures.increment();
-    if(maxRetryCount <= 0) {
+    if (maxRetryCount <= 0) {
       throw new RuntimeException(e);
     }
     LOG.error("FS reader error", e);
     addToFailedList();
   }
 
-  protected void addToFailedList() {
-
+  protected void addToFailedList()
+  {
     FailedFile ff = new FailedFile(currentFile, offset, retryCount);
 
     try {
       // try to close file
-      if (this.inputStream != null)
+      if (this.inputStream != null) {
         this.inputStream.close();
-    } catch(IOException e) {
+      }
+    } catch (IOException e) {
       localNumberOfFailures.increment();
       LOG.error("Could not close input stream on: " + currentFile);
     }
 
-    ff.retryCount ++;
+    ff.retryCount++;
     ff.lastFailedTime = System.currentTimeMillis();
     ff.offset = this.offset;
 
@@ -757,8 +740,9 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     this.currentFile = null;
     this.inputStream = null;
 
-    if (ff.retryCount > maxRetryCount)
+    if (ff.retryCount > maxRetryCount) {
       return;
+    }
 
     localNumberOfRetries.increment();
     LOG.info("adding to failed list path {} offset {} retry {}", ff.path, ff.offset, ff.retryCount);
@@ -769,8 +753,9 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
   {
     LOG.info("retrying failed file {} offset {} retry {}", ff.path, ff.offset, ff.retryCount);
     String path = ff.path;
-    if (!fs.exists(new Path(path)))
+    if (!fs.exists(new Path(path))) {
       return null;
+    }
     this.inputStream = openFile(new Path(path));
     this.offset = ff.offset;
     this.retryCount = ff.retryCount;
@@ -793,8 +778,9 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
   {
     LOG.info("closing file {} offset {}", currentFile, offset);
 
-    if (is != null)
+    if (is != null) {
       is.close();
+    }
 
     currentFile = null;
     inputStream = null;
@@ -828,7 +814,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     List<String> totalPendingFiles = Lists.newLinkedList();
     Set<Integer> deletedOperators =  Sets.newHashSet();
 
-    for(Partition<AbstractFileInputOperator<T>> partition : partitions) {
+    for (Partition<AbstractFileInputOperator<T>> partition : partitions) {
       AbstractFileInputOperator<T> oper = partition.getPartitionedInstance();
       totalProcessedFiles.addAll(oper.processedFiles);
       totalFailedFiles.addAll(oper.failedFiles);
@@ -853,7 +839,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     Collection<IdempotentStorageManager> newManagers = Lists.newArrayListWithExpectedSize(totalCount);
 
     KryoCloneUtils<AbstractFileInputOperator<T>> cloneUtils = KryoCloneUtils.createCloneUtils(this);
-    for (int i=0; i<scanners.size(); i++) {
+    for (int i = 0; i < scanners.size(); i++) {
 
       @SuppressWarnings("unchecked")
       AbstractFileInputOperator<T> oper = cloneUtils.getClone();
@@ -873,7 +859,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
       oper.currentFile = null;
       oper.offset = 0;
       Iterator<FailedFile> unfinishedIter = currentFiles.iterator();
-      while(unfinishedIter.hasNext()) {
+      while (unfinishedIter.hasNext()) {
         FailedFile unfinishedFile = unfinishedIter.next();
         if (scn.acceptFile(unfinishedFile.path)) {
           oper.unfinishedFiles.add(unfinishedFile);
@@ -895,9 +881,9 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
       /* redistribute pending files properly */
       oper.pendingFiles.clear();
       Iterator<String> pendingFilesIterator = totalPendingFiles.iterator();
-      while(pendingFilesIterator.hasNext()) {
+      while (pendingFilesIterator.hasNext()) {
         String pathString = pendingFilesIterator.next();
-        if(scn.acceptFile(pathString)) {
+        if (scn.acceptFile(pathString)) {
           oper.pendingFiles.add(pathString);
           pendingFilesIterator.remove();
         }
@@ -932,8 +918,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
   {
     try {
       idempotentStorageManager.deleteUpTo(operatorId, windowId);
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       throw new RuntimeException(e);
     }
   }
@@ -941,16 +926,18 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
   /**
    * Read the next item from the stream. Depending on the type of stream, this could be a byte array, line or object.
    * Upon return of null, the stream will be considered fully consumed.
-   * @throws IOException
+   *
    * @return Depending on the type of stream an object is returned. When null is returned the stream is consumed.
+   * @throws IOException
    */
-  abstract protected T readEntity() throws IOException;
+  protected abstract T readEntity() throws IOException;
 
   /**
    * Emit the tuple on the port
+   *
    * @param tuple
    */
-  abstract protected void emit(T tuple);
+  protected abstract void emit(T tuple);
 
 
   /**
@@ -1017,17 +1004,21 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
       this.regex = null;
     }
 
-    public int getPartitionCount() {
+    public int getPartitionCount()
+    {
       return partitionCount;
     }
 
-    public int getPartitionIndex() {
+    public int getPartitionIndex()
+    {
       return partitionIndex;
     }
 
-    protected Pattern getRegex() {
-      if (this.regex == null && this.filePatternRegexp != null)
+    protected Pattern getRegex()
+    {
+      if (this.regex == null && this.filePatternRegexp != null) {
         this.regex = Pattern.compile(this.filePatternRegexp);
+      }
       return this.regex;
     }
 
@@ -1037,8 +1028,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
       try {
         LOG.debug("Scanning {} with pattern {}", filePath, this.filePatternRegexp);
         FileStatus[] files = fs.listStatus(filePath);
-        for (FileStatus status : files)
-        {
+        for (FileStatus status : files) {
           Path path = status.getPath();
           String filePathStr = path.toString();
 
@@ -1081,8 +1071,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
         }
       }
       Pattern regex = this.getRegex();
-      if (regex != null)
-      {
+      if (regex != null) {
         Matcher matcher = regex.matcher(filePathStr);
         if (!matcher.matches()) {
           return false;
@@ -1094,13 +1083,14 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
     public List<DirectoryScanner> partition(int count)
     {
       ArrayList<DirectoryScanner> partitions = Lists.newArrayListWithExpectedSize(count);
-      for (int i=0; i<count; i++) {
+      for (int i = 0; i < count; i++) {
         partitions.add(this.createPartition(i, count));
       }
       return partitions;
     }
 
-    public List<DirectoryScanner>  partition(int count , @SuppressWarnings("unused") Collection<DirectoryScanner> scanners) {
+    public List<DirectoryScanner>  partition(int count, @SuppressWarnings("unused") Collection<DirectoryScanner> scanners)
+    {
       return partition(count);
     }
 
@@ -1153,7 +1143,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
         return false;
       }
 
-      RecoveryEntry that = (RecoveryEntry) o;
+      RecoveryEntry that = (RecoveryEntry)o;
 
       if (endOffset != that.endOffset) {
         return false;
@@ -1192,7 +1182,7 @@ public abstract class AbstractFileInputOperator<T> implements InputOperator, Par
    */
   public static class FileLineInputOperator extends AbstractFileInputOperator<String>
   {
-    public transient final DefaultOutputPort<String> output = new DefaultOutputPort<String>();
+    public final transient DefaultOutputPort<String> output = new DefaultOutputPort<String>();
 
     protected transient BufferedReader br;
 


[09/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java b/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
index 25e4f27..cb559dc 100644
--- a/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
+++ b/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
@@ -54,12 +54,13 @@ public class ReusableByteArrayInputStream extends ByteArrayInputStream
     mark = 0;
   }
   
-  
-  public int getPos(){
+  public int getPos()
+  {
     return pos;
   }
   
-  public byte[] getBuf(){
+  public byte[] getBuf()
+  {
     return buf;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcherTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcherTest.java b/library/src/test/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcherTest.java
index f277244..a3d3019 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcherTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcherTest.java
@@ -31,8 +31,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.lib.testbench.CollectorTestSink;
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/AllAfterMatchMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/AllAfterMatchMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/AllAfterMatchMapTest.java
index 9f51ce2..d3d69cf 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/AllAfterMatchMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/AllAfterMatchMapTest.java
@@ -78,17 +78,13 @@ public class AllAfterMatchMapTest
     Assert.assertEquals("number emitted tuples", 3,
         allSink.collectedTuples.size());
     for (Object o : allSink.collectedTuples) {
-      for (Map.Entry<String, Number> e : ((HashMap<String, Number>) o)
-              .entrySet()) {
+      for (Map.Entry<String, Number> e : ((HashMap<String, Number>)o).entrySet()) {
         if (e.getKey().equals("a")) {
-          Assert.assertEquals("emitted value for 'a' was ", new Double(3), new Double( e
-                  .getValue().doubleValue()));
+          Assert.assertEquals("emitted value for 'a' was ", new Double(3), new Double(e.getValue().doubleValue()));
         } else if (e.getKey().equals("b")) {
-          Assert.assertEquals("emitted tuple for 'b' was ", new Double(6), new Double(e
-                  .getValue().doubleValue()));
+          Assert.assertEquals("emitted tuple for 'b' was ", new Double(6), new Double(e.getValue().doubleValue()));
         } else if (e.getKey().equals("c")) {
-          Assert.assertEquals("emitted tuple for 'c' was ", new Double(9), new Double(e
-                  .getValue().doubleValue()));
+          Assert.assertEquals("emitted tuple for 'c' was ", new Double(9), new Double(e.getValue().doubleValue()));
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/BottomNMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/BottomNMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/BottomNMapTest.java
index 5e39e44..c51ee2b 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/BottomNMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/BottomNMapTest.java
@@ -102,16 +102,14 @@ public class BottomNMapTest
     oper.endWindow();
 
     Assert.assertEquals("number emitted tuples", 3, sortSink.collectedTuples.size());
-    for (Object o: sortSink.collectedTuples) {
+    for (Object o : sortSink.collectedTuples) {
       log.debug(o.toString());
-      for (Map.Entry<String, ArrayList<Number>> e: ((HashMap<String, ArrayList<Number>>)o).entrySet()) {
+      for (Map.Entry<String, ArrayList<Number>> e : ((HashMap<String, ArrayList<Number>>)o).entrySet()) {
         if (e.getKey().equals("a")) {
           Assert.assertEquals("emitted value for 'a' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("b")) {
+        } else if (e.getKey().equals("b")) {
           Assert.assertEquals("emitted tuple for 'b' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("c")) {
+        } else if (e.getKey().equals("c")) {
           Assert.assertEquals("emitted tuple for 'c' was ", 1, e.getValue().size());
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java b/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
index 23629e9..7f3061b 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
@@ -21,9 +21,8 @@ package com.datatorrent.lib.algo;
 import java.util.ArrayList;
 import java.util.HashMap;
 
-import org.junit.Test;
-
 import org.junit.Assert;
+import org.junit.Test;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
@@ -31,10 +30,11 @@ public class BottomNUnifierTest
 {
   @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-  public void testUnifier() {
+  public void testUnifier()
+  {
     
     // Instantiate unifier
-    BottomNUnifier<String, Integer> oper = new BottomNUnifier<String, Integer> ();
+    BottomNUnifier<String, Integer> oper = new BottomNUnifier<>();
     oper.setN(2);
     CollectorTestSink sink = new CollectorTestSink();
     oper.mergedport.setSink(sink);
@@ -55,7 +55,7 @@ public class BottomNUnifierTest
     oper.endWindow();
     
     Assert.assertEquals("Tuples in sink", sink.collectedTuples.size(), 1);
-    tuple = (HashMap<String, ArrayList<Integer>>) sink.collectedTuples.get(0);
+    tuple = (HashMap<String, ArrayList<Integer>>)sink.collectedTuples.get(0);
     values = tuple.get("a");
     Assert.assertEquals(2, values.size());
     Assert.assertEquals(true, values.indexOf(2) >= 0);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/BottomNUniqueMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/BottomNUniqueMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/BottomNUniqueMapTest.java
index c993a0a..cb3667f 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/BottomNUniqueMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/BottomNUniqueMapTest.java
@@ -103,16 +103,15 @@ public class BottomNUniqueMapTest
     oper.endWindow();
 
     Assert.assertEquals("number emitted tuples", 3, sortSink.collectedTuples.size());
-    for (Object o: sortSink.collectedTuples) {
+    for (Object o : sortSink.collectedTuples) {
       log.debug(o.toString());
-      for (Map.Entry<String, ArrayList<HashMap<Number, Integer>>> e: ((HashMap<String, ArrayList<HashMap<Number, Integer>>>)o).entrySet()) {
+      for (Map.Entry<String, ArrayList<HashMap<Number, Integer>>> e : ((HashMap<String, ArrayList<HashMap<Number,
+          Integer>>>)o).entrySet()) {
         if (e.getKey().equals("a")) {
           Assert.assertEquals("emitted value for 'a' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("b")) {
+        } else if (e.getKey().equals("b")) {
           Assert.assertEquals("emitted tuple for 'b' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("c")) {
+        } else if (e.getKey().equals("c")) {
           Assert.assertEquals("emitted tuple for 'c' was ", 1, e.getValue().size());
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/DistinctMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/DistinctMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/DistinctMapTest.java
index 1159fe5..249c39d 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/DistinctMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/DistinctMapTest.java
@@ -96,11 +96,9 @@ public class DistinctMapTest
         String key = e.getKey();
         if (key.equals("a")) {
           aval += e.getValue();
-        }
-        else if (key.equals("b")) {
+        } else if (key.equals("b")) {
           bval += e.getValue();
-        }
-        else if (key.equals("c")) {
+        } else if (key.equals("c")) {
           cval += e.getValue();
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/FilterKeysMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/FilterKeysMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/FilterKeysMapTest.java
index 2eb073e..f00652e 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/FilterKeysMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/FilterKeysMapTest.java
@@ -57,7 +57,7 @@ public class FilterKeysMapTest
     oper.filter.setSink(sortSink);
     oper.setKey("b");
     oper.clearKeys();
-    String [] keys = new String[3];
+    String[] keys = new String[3];
     keys[0] = "e";
     keys[1] = "f";
     keys[2] = "blah";

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/FilterValuesTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/FilterValuesTest.java b/library/src/test/java/com/datatorrent/lib/algo/FilterValuesTest.java
index 061d3e9..57b8f19 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/FilterValuesTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/FilterValuesTest.java
@@ -37,7 +37,7 @@ public class FilterValuesTest
   @SuppressWarnings({ "rawtypes", "unchecked" })
   int getTotal(List list)
   {
-    ArrayList<Integer> ilist = (ArrayList<Integer>) list;
+    ArrayList<Integer> ilist = (ArrayList<Integer>)list;
     int ret = 0;
     for (Integer i : ilist) {
       ret += i;
@@ -56,7 +56,7 @@ public class FilterValuesTest
 
     CollectorTestSink sortSink = new CollectorTestSink();
     oper.filter.setSink(sortSink);
-    Integer [] values = new Integer[2];
+    Integer[] values = new Integer[2];
     oper.setValue(5);
     oper.clearValues();
     values[0] = 200;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/FirstNTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/FirstNTest.java b/library/src/test/java/com/datatorrent/lib/algo/FirstNTest.java
index 8005f9d..e118459 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/FirstNTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/FirstNTest.java
@@ -103,15 +103,13 @@ public class FirstNTest
     int aval = 0;
     int bval = 0;
     int cval = 0;
-    for (Object o: sortSink.collectedTuples) {
-      for (Map.Entry<String, Number> e: ((HashMap<String, Number>)o).entrySet()) {
+    for (Object o : sortSink.collectedTuples) {
+      for (Map.Entry<String, Number> e : ((HashMap<String, Number>)o).entrySet()) {
         if (e.getKey().equals("a")) {
           aval += e.getValue().intValue();
-        }
-        else if (e.getKey().equals("b")) {
+        } else if (e.getKey().equals("b")) {
           bval += e.getValue().intValue();
-        }
-        else if (e.getKey().equals("c")) {
+        } else if (e.getKey().equals("c")) {
           cval += e.getValue().intValue();
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/InsertSortDescTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/InsertSortDescTest.java b/library/src/test/java/com/datatorrent/lib/algo/InsertSortDescTest.java
index 64b9a90..1dc5a4f 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/InsertSortDescTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/InsertSortDescTest.java
@@ -86,10 +86,10 @@ public class InsertSortDescTest
 
     Assert.assertEquals("number emitted tuples", 1, sortSink.collectedTuples.size());
     Assert.assertEquals("number emitted tuples", 1, hashSink.collectedTuples.size());
-    HashMap map = (HashMap) hashSink.collectedTuples.get(0);
-    input = (ArrayList) sortSink.collectedTuples.get(0);
-    for (Object o: input) {
-     log.debug(String.format("%s : %s", o.toString(), map.get(o).toString()));
+    HashMap map = (HashMap)hashSink.collectedTuples.get(0);
+    input = (ArrayList)sortSink.collectedTuples.get(0);
+    for (Object o : input) {
+      log.debug(String.format("%s : %s", o.toString(), map.get(o).toString()));
     }
     log.debug(String.format("Tested %s type with %d tuples and %d uniques\n", debug, input.size(), map.size()));
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/InsertSortTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/InsertSortTest.java b/library/src/test/java/com/datatorrent/lib/algo/InsertSortTest.java
index 58ae169..4af8793 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/InsertSortTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/InsertSortTest.java
@@ -78,6 +78,6 @@ public class InsertSortTest
     oper.endWindow();
 
     Assert.assertEquals("number emitted tuples", 1, sortSink.collectedTuples.size());
-    input = (ArrayList) sortSink.collectedTuples.get(0);
+    input = (ArrayList)sortSink.collectedTuples.get(0);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/InvertIndexArrayTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/InvertIndexArrayTest.java b/library/src/test/java/com/datatorrent/lib/algo/InvertIndexArrayTest.java
index dcf1f3d..9a3ce81 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/InvertIndexArrayTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/InvertIndexArrayTest.java
@@ -38,7 +38,7 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class InvertIndexArrayTest
 {
-    private static Logger log = LoggerFactory.getLogger(InvertIndexArrayTest.class);
+  private static Logger log = LoggerFactory.getLogger(InvertIndexArrayTest.class);
 
   /**
    * Test oper logic emits correct results
@@ -83,13 +83,13 @@ public class InvertIndexArrayTest
           Assert.assertEquals("Index for \"str1\" contains \"a\"", true, alist.contains("a"));
           Assert.assertEquals("Index for \"str1\" contains \"b\"", true, alist.contains("b"));
           Assert.assertEquals("Index for \"str1\" contains \"c\"", true, alist.contains("c"));
-        }
-        else if (key.equals("str")) {
+
+        } else if (key.equals("str")) {
           Assert.assertEquals("Index for \"str1\" contains \"a\"", true, alist.contains("a"));
           Assert.assertEquals("Index for \"str1\" contains \"b\"", true, alist.contains("b"));
           Assert.assertEquals("Index for \"str1\" contains \"c\"", false, alist.contains("c"));
-        }
-        else if (key.equals("blah")) {
+
+        } else if (key.equals("blah")) {
           Assert.assertEquals("Index for \"str1\" contains \"a\"", false, alist.contains("a"));
           Assert.assertEquals("Index for \"str1\" contains \"b\"", false, alist.contains("b"));
           Assert.assertEquals("Index for \"str1\" contains \"c\"", true, alist.contains("c"));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/InvertIndexTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/InvertIndexTest.java b/library/src/test/java/com/datatorrent/lib/algo/InvertIndexTest.java
index b19d1dd..7de8e42 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/InvertIndexTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/InvertIndexTest.java
@@ -38,7 +38,7 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class InvertIndexTest
 {
-    private static Logger log = LoggerFactory.getLogger(InvertIndexTest.class);
+  private static Logger log = LoggerFactory.getLogger(InvertIndexTest.class);
 
   /**
    * Test oper logic emits correct results
@@ -86,13 +86,13 @@ public class InvertIndexTest
           Assert.assertEquals("Index for \"str1\" contains \"a\"", true, alist.contains("a"));
           Assert.assertEquals("Index for \"str1\" contains \"b\"", true, alist.contains("b"));
           Assert.assertEquals("Index for \"str1\" contains \"c\"", true, alist.contains("c"));
-        }
-        else if (key.equals("str")) {
+
+        } else if (key.equals("str")) {
           Assert.assertEquals("Index for \"str1\" contains \"a\"", true, alist.contains("a"));
           Assert.assertEquals("Index for \"str1\" contains \"b\"", true, alist.contains("b"));
           Assert.assertEquals("Index for \"str1\" contains \"c\"", false, alist.contains("c"));
-        }
-        else if (key.equals("blah")) {
+
+        } else if (key.equals("blah")) {
           Assert.assertEquals("Index for \"str1\" contains \"a\"", false, alist.contains("a"));
           Assert.assertEquals("Index for \"str1\" contains \"b\"", false, alist.contains("b"));
           Assert.assertEquals("Index for \"str1\" contains \"c\"", true, alist.contains("c"));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyMapTest.java
index 0fd0d96..8da56ed 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyMapTest.java
@@ -96,12 +96,11 @@ public class LeastFrequentKeyMapTest
     list = (ArrayList<HashMap<String, Integer>>)listSink.tuple;
     int acount = 0;
     int ccount = 0;
-    for (HashMap<String, Integer> h: list) {
+    for (HashMap<String, Integer> h : list) {
       val = h.get("a");
       if (val == null) {
         ccount = h.get("c");
-      }
-      else {
+      } else {
         acount = val;
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyTest.java b/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyTest.java
index 3bc18be..2911f20 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyTest.java
@@ -61,11 +61,11 @@ public class LeastFrequentKeyTest
     }
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    HashMap<String, Integer> tuple = (HashMap<String, Integer>) matchSink.tuple;
+    HashMap<String, Integer> tuple = (HashMap<String, Integer>)matchSink.tuple;
     Integer val = tuple.get("b");
     Assert.assertEquals("Count of b was ", btot, val.intValue());
     Assert.assertEquals("number emitted tuples", 1, listSink.count);
-    ArrayList<HashMap<String,Integer>> list = (ArrayList<HashMap<String,Integer>>) listSink.tuple;
+    ArrayList<HashMap<String, Integer>> list = (ArrayList<HashMap<String, Integer>>)listSink.tuple;
     val = list.get(0).get("b");
     Assert.assertEquals("Count of b was ", btot, val.intValue());
 
@@ -87,22 +87,21 @@ public class LeastFrequentKeyTest
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
     Assert.assertEquals("number emitted tuples", 1, listSink.count);
-    list = (ArrayList<HashMap<String,Integer>>) listSink.tuple;
+    list = (ArrayList<HashMap<String,Integer>>)listSink.tuple;
     int acount = 0;
     int ccount = 0;
-    for (HashMap<String,Integer> h : list) {
+    for (HashMap<String, Integer> h : list) {
       val = h.get("a");
       if (val == null) {
         ccount = h.get("c");
-      }
-      else {
+      } else {
         acount = val;
       }
     }
     Assert.assertEquals("Count of a was ", atot, acount);
     Assert.assertEquals("Count of c was ", ctot, ccount);
-    HashMap<String,Integer> mtuple = (HashMap<String,Integer>) matchSink.tuple;
-    val =mtuple.get("a");
+    HashMap<String, Integer> mtuple = (HashMap<String, Integer>)matchSink.tuple;
+    val = mtuple.get("a");
     if (val == null) {
       val = mtuple.get("c");
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyValueMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyValueMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyValueMapTest.java
index 2a639da..955a901 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyValueMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/LeastFrequentKeyValueMapTest.java
@@ -91,14 +91,14 @@ public class LeastFrequentKeyValueMapTest
         if (key.equals("a")) {
           vcount = e.getValue().get(5);
           Assert.assertEquals("Key \"a\" has value ", 4, vcount);
-        }
-        else if (key.equals("b")) {
+
+        } else if (key.equals("b")) {
           vcount = e.getValue().get(2);
           Assert.assertEquals("Key \"a\" has value ", 3, vcount);
           vcount = e.getValue().get(4);
           Assert.assertEquals("Key \"a\" has value ", 3, vcount);
-        }
-        else if (key.equals("c")) {
+
+        } else if (key.equals("c")) {
           vcount = e.getValue().get(4);
           Assert.assertEquals("Key \"a\" has value ", 6, vcount);
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MatchAllMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MatchAllMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/MatchAllMapTest.java
index 9863832..15e7b3a 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MatchAllMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MatchAllMapTest.java
@@ -66,7 +66,7 @@ public class MatchAllMapTest
     oper.endWindow();
 
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    Boolean result = (Boolean) matchSink.tuple;
+    Boolean result = (Boolean)matchSink.tuple;
     Assert.assertEquals("result was false", true, result);
     matchSink.clear();
 
@@ -80,7 +80,7 @@ public class MatchAllMapTest
     oper.data.process(input);
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    result = (Boolean) matchSink.tuple;
+    result = (Boolean)matchSink.tuple;
     Assert.assertEquals("result was false", false, result);
     matchSink.clear();
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MatchAnyMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MatchAnyMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/MatchAnyMapTest.java
index c7c239b..5dfd9ea 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MatchAnyMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MatchAnyMapTest.java
@@ -67,7 +67,7 @@ public class MatchAnyMapTest
     oper.endWindow();
 
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    Boolean result = (Boolean) matchSink.tuple;
+    Boolean result = (Boolean)matchSink.tuple;
     Assert.assertEquals("result was false", true, result);
     matchSink.clear();
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MatchMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MatchMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/MatchMapTest.java
index f869316..ecd5e9b 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MatchMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MatchMapTest.java
@@ -68,14 +68,12 @@ public class MatchMapTest
 
     // One for each key
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    for (Map.Entry<String, Number> e: ((HashMap<String, Number>) matchSink.tuple).entrySet()) {
+    for (Map.Entry<String, Number> e : ((HashMap<String, Number>)matchSink.tuple).entrySet()) {
       if (e.getKey().equals("a")) {
         Assert.assertEquals("emitted value for 'a' was ", new Double(2), new Double(e.getValue().doubleValue()));
-      }
-      else if (e.getKey().equals("b")) {
+      } else if (e.getKey().equals("b")) {
         Assert.assertEquals("emitted tuple for 'b' was ", new Double(20), new Double(e.getValue().doubleValue()));
-      }
-      else if (e.getKey().equals("c")) {
+      } else if (e.getKey().equals("c")) {
         Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000), new Double(e.getValue().doubleValue()));
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MergeSortNumberTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MergeSortNumberTest.java b/library/src/test/java/com/datatorrent/lib/algo/MergeSortNumberTest.java
index 471b157..4701957 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MergeSortNumberTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MergeSortNumberTest.java
@@ -18,8 +18,6 @@
  */
 package com.datatorrent.lib.algo;
 
-import static org.junit.Assert.assertTrue;
-
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.Random;
@@ -28,6 +26,8 @@ import org.junit.Test;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
+import static org.junit.Assert.assertTrue;
+
 /**
  *
  * Functional tests for {@link com.datatorrent.lib.algo.MergeSort}<p>
@@ -38,38 +38,40 @@ public class MergeSortNumberTest
    * Test node logic emits correct results
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @Test
   public void testNodeProcessing() throws Exception
   {
-  	MergeSortNumber<Integer> oper = new MergeSortNumber<Integer>();
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.sort.setSink(sink);
+    MergeSortNumber<Integer> oper = new MergeSortNumber<Integer>();
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.sort.setSink(sink);
 
-  	oper.setup(null);
-  	oper.beginWindow(1);
+    oper.setup(null);
+    oper.beginWindow(1);
 
-  	Random rand = new Random();
-  	ArrayList<Integer> tuple = new ArrayList<Integer>();
-  	tuple.add(rand.nextInt(50));
-  	tuple.add(50 + rand.nextInt(50));
-  	oper.process(tuple);
-  	tuple = new ArrayList<Integer>();
-  	tuple.add(rand.nextInt(50));
-  	tuple.add(50 + rand.nextInt(50));
-  	oper.process(tuple);
+    Random rand = new Random();
+    ArrayList<Integer> tuple = new ArrayList<Integer>();
+    tuple.add(rand.nextInt(50));
+    tuple.add(50 + rand.nextInt(50));
+    oper.process(tuple);
+    tuple = new ArrayList<Integer>();
+    tuple.add(rand.nextInt(50));
+    tuple.add(50 + rand.nextInt(50));
+    oper.process(tuple);
 
-  	oper.endWindow();
-  	oper.teardown();
+    oper.endWindow();
+    oper.teardown();
 
-  	assertTrue("Tuples in sink", sink.collectedTuples.size() == 1);
-  	Iterator iter = sink.collectedTuples.iterator();
-  	if (!iter.hasNext()) return;
-  	tuple = (ArrayList<Integer>) iter.next();
-  	assertTrue("Tuple size 4", tuple.size() == 4);
-  	Integer val = tuple.get(0);
-  	for(int i=1; i < 4; i++) {
-  		assertTrue("Values must be sorted " + tuple, val <= tuple.get(i));
-  		val = tuple.get(i);
-  	}
+    assertTrue("Tuples in sink", sink.collectedTuples.size() == 1);
+    Iterator iter = sink.collectedTuples.iterator();
+    if (!iter.hasNext()) {
+      return;
+    }
+    tuple = (ArrayList<Integer>)iter.next();
+    assertTrue("Tuple size 4", tuple.size() == 4);
+    Integer val = tuple.get(0);
+    for (int i = 1; i < 4; i++) {
+      assertTrue("Values must be sorted " + tuple, val <= tuple.get(i));
+      val = tuple.get(i);
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyMapTest.java
index 720e74c..6851e4a 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyMapTest.java
@@ -97,12 +97,11 @@ public class MostFrequentKeyMapTest
     list = (ArrayList<HashMap<String, Integer>>)listSink.tuple;
     int acount = 0;
     int ccount = 0;
-    for (HashMap<String, Integer> h: list) {
+    for (HashMap<String, Integer> h : list) {
       val = h.get("a");
       if (val == null) {
         ccount = h.get("c");
-      }
-      else {
+      } else {
         acount = val;
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyTest.java b/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyTest.java
index 3ba9a50..d2c1cb1 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyTest.java
@@ -62,11 +62,11 @@ public class MostFrequentKeyTest
     }
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    HashMap<String, Integer> tuple = (HashMap<String, Integer>) matchSink.tuple;
+    HashMap<String, Integer> tuple = (HashMap<String, Integer>)matchSink.tuple;
     Integer val = tuple.get("b");
     Assert.assertEquals("Count of b was ", btot, val.intValue());
     Assert.assertEquals("number emitted tuples", 1, listSink.count);
-    ArrayList<HashMap<String,Integer>> list = (ArrayList<HashMap<String,Integer>>) listSink.tuple;
+    ArrayList<HashMap<String, Integer>> list = (ArrayList<HashMap<String, Integer>>)listSink.tuple;
     val = list.get(0).get("b");
     Assert.assertEquals("Count of b was ", btot, val.intValue());
 
@@ -88,22 +88,21 @@ public class MostFrequentKeyTest
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
     Assert.assertEquals("number emitted tuples", 1, listSink.count);
-    list = (ArrayList<HashMap<String,Integer>>) listSink.tuple;
+    list = (ArrayList<HashMap<String, Integer>>)listSink.tuple;
     int acount = 0;
     int ccount = 0;
-    for (HashMap<String,Integer> h : list) {
+    for (HashMap<String, Integer> h : list) {
       val = h.get("a");
       if (val == null) {
         ccount = h.get("c");
-      }
-      else {
+      } else {
         acount = val;
       }
     }
     Assert.assertEquals("Count of a was ", atot, acount);
     Assert.assertEquals("Count of c was ", ctot, ccount);
-    HashMap<String,Integer> mtuple = (HashMap<String,Integer>) matchSink.tuple;
-    val =mtuple.get("a");
+    HashMap<String, Integer> mtuple = (HashMap<String, Integer>)matchSink.tuple;
+    val = mtuple.get("a");
     if (val == null) {
       val = mtuple.get("c");
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyValueMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyValueMapTest.java b/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyValueMapTest.java
index 62e9244..5a72a5c 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyValueMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/MostFrequentKeyValueMapTest.java
@@ -91,14 +91,14 @@ public class MostFrequentKeyValueMapTest
         if (key.equals("a")) {
           vcount = e.getValue().get(1);
           Assert.assertEquals("Key \"a\" has value ", 5, vcount);
-        }
-        else if (key.equals("b")) {
+
+        } else if (key.equals("b")) {
           vcount = e.getValue().get(2);
           Assert.assertEquals("Key \"a\" has value ", 3, vcount);
           vcount = e.getValue().get(4);
           Assert.assertEquals("Key \"a\" has value ", 3, vcount);
-        }
-        else if (key.equals("c")) {
+
+        } else if (key.equals("c")) {
           vcount = e.getValue().get(3);
           Assert.assertEquals("Key \"a\" has value ", 10, vcount);
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/TopNTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/TopNTest.java b/library/src/test/java/com/datatorrent/lib/algo/TopNTest.java
index 725b4cf..f9b9f30 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/TopNTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/TopNTest.java
@@ -107,15 +107,15 @@ public class TopNTest
       for (Map.Entry<String, ArrayList<Number>> e: ((HashMap<String, ArrayList<Number>>)o).entrySet()) {
         if (e.getKey().equals("a")) {
           Assert.assertEquals("emitted value for 'a' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("b")) {
+
+        } else if (e.getKey().equals("b")) {
           Assert.assertEquals("emitted tuple for 'b' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("c")) {
+
+        } else if (e.getKey().equals("c")) {
           Assert.assertEquals("emitted tuple for 'c' was ", 1, e.getValue().size());
         }
         log.debug(String.format("Sorted list for %s:", e.getKey()));
-        for (Number i: e.getValue()) {
+        for (Number i : e.getValue()) {
           log.debug(String.format("%s", i.toString()));
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/TopNUniqueTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/TopNUniqueTest.java b/library/src/test/java/com/datatorrent/lib/algo/TopNUniqueTest.java
index ee1148a..601cb3b 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/TopNUniqueTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/TopNUniqueTest.java
@@ -94,7 +94,7 @@ public class TopNUniqueTest
     input.clear();
     input.put("b", 6);
     input.put("a", 1001);
-     oper.data.process(input);
+    oper.data.process(input);
 
     input.clear();
     input.put("c", 9);
@@ -108,11 +108,9 @@ public class TopNUniqueTest
       for (Map.Entry<String, ArrayList<HashMap<Number, Integer>>> e: ((HashMap<String, ArrayList<HashMap<Number, Integer>>>)o).entrySet()) {
         if (e.getKey().equals("a")) {
           Assert.assertEquals("emitted value for 'a' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("b")) {
+        } else if (e.getKey().equals("b")) {
           Assert.assertEquals("emitted tuple for 'b' was ", 3, e.getValue().size());
-        }
-        else if (e.getKey().equals("c")) {
+        } else if (e.getKey().equals("c")) {
           Assert.assertEquals("emitted tuple for 'c' was ", 1, e.getValue().size());
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/UniqueCounterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/UniqueCounterTest.java b/library/src/test/java/com/datatorrent/lib/algo/UniqueCounterTest.java
index b29d1a4..d1fbc02 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/UniqueCounterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/UniqueCounterTest.java
@@ -68,7 +68,7 @@ public class UniqueCounterTest
       }
     }
     oper.endWindow();
-    HashMap<String,Integer> tuple = (HashMap<String,Integer>) sink.tuple;
+    HashMap<String, Integer> tuple = (HashMap<String, Integer>)sink.tuple;
     int acount = tuple.get("a");
     int bcount = tuple.get("b");
     int ccount = tuple.get("c");
@@ -76,9 +76,9 @@ public class UniqueCounterTest
     int ecount = tuple.get("e");
     Assert.assertEquals("number emitted tuples", 1, sink.count);
     Assert.assertEquals("number emitted tuples", numTuples, acount);
-    Assert.assertEquals("number emitted tuples", numTuples/2, bcount);
-    Assert.assertEquals("number emitted tuples", numTuples/3 + 1, ccount);
-    Assert.assertEquals("number emitted tuples", numTuples/5, dcount);
-    Assert.assertEquals("number emitted tuples", numTuples/10, ecount);
+    Assert.assertEquals("number emitted tuples", numTuples / 2, bcount);
+    Assert.assertEquals("number emitted tuples", numTuples / 3 + 1, ccount);
+    Assert.assertEquals("number emitted tuples", numTuples / 5, dcount);
+    Assert.assertEquals("number emitted tuples", numTuples / 10, ecount);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/algo/UniqueValueCountTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/UniqueValueCountTest.java b/library/src/test/java/com/datatorrent/lib/algo/UniqueValueCountTest.java
index b7d0207..c4afc0e 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/UniqueValueCountTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/UniqueValueCountTest.java
@@ -26,6 +26,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableMap;
+
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.KeyValPair;
 import com.datatorrent.lib.util.TestUtils;
@@ -35,52 +36,56 @@ import com.datatorrent.lib.util.TestUtils;
  *
  * @since 0.3.5
  */
-public class UniqueValueCountTest {
-    private static Logger LOG = LoggerFactory.getLogger(UniqueValueCountTest.class);
+public class UniqueValueCountTest
+{
+  private static Logger LOG = LoggerFactory.getLogger(UniqueValueCountTest.class);
+
+  @Test
+  public void uniqueCountTest()
+  {
+    UniqueValueCount<String> uniqueCountOper = new UniqueValueCount<String>();
+    CollectorTestSink<KeyValPair<String, Integer>> outputSink =
+        new CollectorTestSink<KeyValPair<String, Integer>>();
+    CollectorTestSink<KeyValPair<String, Set<Object>>> outputSetSink =
+        new CollectorTestSink<KeyValPair<String, Set<Object>>>();
+    TestUtils.setSink(uniqueCountOper.output, outputSink);
+    TestUtils.setSink(uniqueCountOper.outputValues, outputSetSink);
+
+    uniqueCountOper.beginWindow(0);
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test1", 1));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test1", 2));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test1", 2));
+    uniqueCountOper.endWindow();
 
+    Assert.assertEquals("number emitted tuples", 1, outputSink.collectedTuples.size());
+    KeyValPair<String, Integer> emittedPair = outputSink.collectedTuples.get(0);
 
-    @Test
-    public void uniqueCountTest(){
-        UniqueValueCount<String> uniqueCountOper= new UniqueValueCount<String>();
-        CollectorTestSink<KeyValPair <String,Integer>> outputSink = new CollectorTestSink<KeyValPair <String,Integer>>();
-        CollectorTestSink<KeyValPair <String,Set<Object>>> outputSetSink = new CollectorTestSink<KeyValPair <String, Set<Object>>>();
-        TestUtils.setSink(uniqueCountOper.output, outputSink);
-        TestUtils.setSink(uniqueCountOper.outputValues, outputSetSink);
-        
-        uniqueCountOper.beginWindow(0);
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test1",1));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test1",2));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test1",2));
-        uniqueCountOper.endWindow();
+    Assert.assertEquals("emitted key was ", "test1", emittedPair.getKey());
+    Assert.assertEquals("emitted value was ", 2, emittedPair.getValue().intValue());
 
-        Assert.assertEquals("number emitted tuples", 1, outputSink.collectedTuples.size());
-        KeyValPair<String,Integer> emittedPair= outputSink.collectedTuples.get(0);
-        
-        Assert.assertEquals("emitted key was ", "test1", emittedPair.getKey());
-        Assert.assertEquals("emitted value was ",2, emittedPair.getValue().intValue());
+    Assert.assertEquals("number emitted tuples", 1, outputSetSink.collectedTuples.size());
+    KeyValPair<String, Set<Object>> emittedSetPair = outputSetSink.collectedTuples.get(0);
+    Assert.assertTrue(emittedSetPair.getValue().contains(1));
+    Assert.assertTrue(emittedSetPair.getValue().contains(2));
 
-        Assert.assertEquals("number emitted tuples", 1, outputSetSink.collectedTuples.size());
-        KeyValPair<String,Set<Object>> emittedSetPair= outputSetSink.collectedTuples.get(0);
-        Assert.assertTrue(emittedSetPair.getValue().contains(1));
-        Assert.assertTrue(emittedSetPair.getValue().contains(2));
-        
-        outputSink.clear();
-        uniqueCountOper.beginWindow(1);
-        uniqueCountOper.input.process(new KeyValPair<String,Object>("test1",1));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test1",2));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test1",2));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test2",1));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test2",2));
-        uniqueCountOper.input.process(new KeyValPair<String, Object>("test2",2));
-        uniqueCountOper.endWindow();
+    outputSink.clear();
+    uniqueCountOper.beginWindow(1);
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test1", 1));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test1", 2));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test1", 2));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test2", 1));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test2", 2));
+    uniqueCountOper.input.process(new KeyValPair<String, Object>("test2", 2));
+    uniqueCountOper.endWindow();
 
-        ImmutableMap<String,Integer> answers=ImmutableMap.of("test1",2,"test2",2);
+    ImmutableMap<String, Integer> answers = ImmutableMap.of("test1", 2, "test2", 2);
 
-        Assert.assertEquals("number emitted tuples", 2, outputSink.collectedTuples.size());
-        for(KeyValPair<String,Integer> emittedPair2: outputSink.collectedTuples) {
-            Assert.assertEquals("emmit value of "+ emittedPair2.getKey() +" was ", answers.get(emittedPair2.getKey()), emittedPair2.getValue());
-        }
-        LOG.debug("Done unique count testing testing\n") ;
+    Assert.assertEquals("number emitted tuples", 2, outputSink.collectedTuples.size());
+    for (KeyValPair<String, Integer> emittedPair2 : outputSink.collectedTuples) {
+      Assert.assertEquals("emmit value of " + emittedPair2.getKey() + " was ", answers.get(emittedPair2.getKey()),
+          emittedPair2.getValue());
     }
+    LOG.debug("Done unique count testing testing\n");
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlushTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlushTest.java b/library/src/test/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlushTest.java
index 354d4ae..74a7b46 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlushTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlushTest.java
@@ -20,11 +20,11 @@ package com.datatorrent.lib.appdata.datastructs;
 
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 import com.datatorrent.lib.appdata.datastructs.CacheLRUSynchronousFlush.CacheFlushListener;
 
 public class CacheLRUSynchronousFlushTest
@@ -35,8 +35,8 @@ public class CacheLRUSynchronousFlushTest
     final Integer key = 1;
     final Integer value = 1;
 
-    CacheLRUSynchronousFlush<Integer, Integer> cache =
-    new CacheLRUSynchronousFlush<Integer, Integer>(new NOPCacheFlushListener<Integer, Integer>());
+    CacheLRUSynchronousFlush<Integer, Integer> cache = new CacheLRUSynchronousFlush<>(
+        new NOPCacheFlushListener<Integer, Integer>());
 
     cache.put(key, value);
 
@@ -103,8 +103,7 @@ public class CacheLRUSynchronousFlushTest
 
     try {
       Thread.sleep(10);
-    }
-    catch(InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/datastructs/DimensionalTableTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/datastructs/DimensionalTableTest.java b/library/src/test/java/com/datatorrent/lib/appdata/datastructs/DimensionalTableTest.java
index db74007..57cc0f2 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/datastructs/DimensionalTableTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/datastructs/DimensionalTableTest.java
@@ -21,13 +21,13 @@ package com.datatorrent.lib.appdata.datastructs;
 import java.util.Map;
 import java.util.Set;
 
+import org.junit.Assert;
+import org.junit.Test;
+
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import org.junit.Assert;
-import org.junit.Test;
-
 import com.datatorrent.lib.util.KryoCloneUtils;
 
 public class DimensionalTableTest
@@ -56,7 +56,7 @@ public class DimensionalTableTest
     DimensionalTable<Integer> table = createTestTable();
 
     Integer point = table.getDataPoint(Lists.newArrayList("google", "taco bell", "Ukraine"));
-    Assert.assertEquals((Integer) 6, point);
+    Assert.assertEquals((Integer)6, point);
 
     Map<String, String> selectionValues = Maps.newHashMap();
     selectionValues.put("publisher", "amazon");
@@ -64,7 +64,7 @@ public class DimensionalTableTest
     selectionValues.put("location", "Czech");
 
     point = table.getDataPoint(selectionValues);
-    Assert.assertEquals((Integer) 7, point);
+    Assert.assertEquals((Integer)7, point);
   }
 
   @Test
@@ -103,9 +103,7 @@ public class DimensionalTableTest
   @Test
   public void duplicateAppendTest()
   {
-    DimensionalTable<Integer> table = new DimensionalTable<Integer>(Lists.newArrayList("publisher",
-                                                                                       "advertiser",
-                                                                                       "location"));
+    DimensionalTable<Integer> table = new DimensionalTable<Integer>(Lists.newArrayList("publisher", "advertiser", "location"));
 
     table.appendRow(1, "google", "starbucks", "CA");
     table.appendRow(2, "google", "starbucks", "CA");
@@ -146,9 +144,7 @@ public class DimensionalTableTest
 
   private DimensionalTable<Integer> createTestTable()
   {
-    DimensionalTable<Integer> table = new DimensionalTable<Integer>(Lists.newArrayList("publisher",
-                                                                                       "advertiser",
-                                                                                       "location"));
+    DimensionalTable<Integer> table = new DimensionalTable<Integer>(Lists.newArrayList("publisher", "advertiser", "location"));
 
     table.appendRow(1, "google", "starbucks", "CA");
     table.appendRow(2, "amazon", "walmart", "NY");

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOMutableTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOMutableTest.java b/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOMutableTest.java
index e4a43ec..1617a75 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOMutableTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOMutableTest.java
@@ -20,11 +20,11 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.Map;
 
-import com.google.common.collect.Maps;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;
 import com.datatorrent.lib.appdata.schemas.Type;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOUtilsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOUtilsTest.java b/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOUtilsTest.java
index 0e1aee3..4a6fd05 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOUtilsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/gpo/GPOUtilsTest.java
@@ -67,14 +67,14 @@ public class GPOUtilsTest
     final Long tlongv = 10000000000L;
 
     int totalBytes = 1 //boolean
-                     + 2 //char
-                     + 4 + tstringv.getBytes().length //string
-                     + 4 //float
-                     + 8 //double
-                     + 1 //byte
-                     + 2 //short
-                     + 4 //int
-                     + 8; //long
+        + 2 //char
+        + 4 + tstringv.getBytes().length //string
+        + 4 //float
+        + 8 //double
+        + 1 //byte
+        + 2 //short
+        + 4 //int
+        + 8; //long
 
     fieldToType.put(tboolean, Type.BOOLEAN);
     fieldToType.put(tchar, Type.CHAR);
@@ -144,14 +144,14 @@ public class GPOUtilsTest
     final Long tlongv = 10000000000L;
 
     int totalBytes = 1 //boolean
-                     + 2 //char
-                     + 4 + tstringv.getBytes().length //string
-                     + 4 //float
-                     + 8 //double
-                     + 1 //byte
-                     + 2 //short
-                     + 4 //int
-                     + 8; //long
+        + 2 //char
+        + 4 + tstringv.getBytes().length //string
+        + 4 //float
+        + 8 //double
+        + 1 //byte
+        + 2 //short
+        + 4 //int
+        + 8; //long
 
     logger.debug("Correct total bytes {}.", totalBytes);
 
@@ -303,9 +303,7 @@ public class GPOUtilsTest
     fieldToSerde.put("keys", SerdeListGPOMutable.INSTANCE);
     fieldToSerde.put("values", SerdeListGPOMutable.INSTANCE);
 
-    FieldsDescriptor metaDataFD = new FieldsDescriptor(fieldToType,
-                                                       fieldToSerde,
-                                                       new PayloadFix());
+    FieldsDescriptor metaDataFD = new FieldsDescriptor(fieldToType, fieldToSerde, new PayloadFix());
 
     GPOMutable gpo = new GPOMutable(metaDataFD);
 
@@ -348,13 +346,13 @@ public class GPOUtilsTest
     @Override
     public void fix(Object[] objects)
     {
-      FieldsDescriptor keyfd = (FieldsDescriptor) objects[0];
-      FieldsDescriptor valuefd = (FieldsDescriptor) objects[1];
+      FieldsDescriptor keyfd = (FieldsDescriptor)objects[0];
+      FieldsDescriptor valuefd = (FieldsDescriptor)objects[1];
 
       @SuppressWarnings("unchecked")
-      List<GPOMutable> keyMutables = (List<GPOMutable>) objects[2];
+      List<GPOMutable> keyMutables = (List<GPOMutable>)objects[2];
       @SuppressWarnings("unchecked")
-      List<GPOMutable> aggregateMutables = (List<GPOMutable>) objects[3];
+      List<GPOMutable> aggregateMutables = (List<GPOMutable>)objects[3];
 
       fix(keyfd, keyMutables);
       fix(valuefd, aggregateMutables);
@@ -362,9 +360,7 @@ public class GPOUtilsTest
 
     private void fix(FieldsDescriptor fd, List<GPOMutable> mutables)
     {
-      for(int index = 0;
-          index < mutables.size();
-          index++) {
+      for (int index = 0; index < mutables.size(); index++) {
         mutables.get(index).setFieldDescriptor(fd);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptorTest.java b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptorTest.java
index a7d6afc..2aeb110 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptorTest.java
@@ -20,13 +20,13 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.Map;
 
-import com.google.common.collect.Maps;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;
 import com.datatorrent.lib.appdata.schemas.Type;
 
@@ -43,7 +43,8 @@ public class SerdeFieldsDescriptorTest
     FieldsDescriptor fd = new FieldsDescriptor(fieldToType);
 
     byte[] bytes = SerdeFieldsDescriptor.INSTANCE.serializeObject(fd);
-    FieldsDescriptor newfd = (FieldsDescriptor) SerdeFieldsDescriptor.INSTANCE.deserializeObject(bytes, new MutableInt(0));
+    FieldsDescriptor newfd = (FieldsDescriptor)SerdeFieldsDescriptor.INSTANCE.deserializeObject(bytes,
+        new MutableInt(0));
 
     Assert.assertEquals(fd, newfd);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutableTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutableTest.java b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutableTest.java
index ec37359..324e352 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutableTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutableTest.java
@@ -21,14 +21,14 @@ package com.datatorrent.lib.appdata.gpo;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;
 import com.datatorrent.lib.appdata.schemas.Type;
 
@@ -109,8 +109,7 @@ public class SerdeListGPOMutableTest
     MutableInt offset = new MutableInt(0);
 
     @SuppressWarnings("unchecked")
-    List<GPOMutable> newMutables =
-    (List<GPOMutable>) SerdeListGPOMutable.INSTANCE.deserializeObject(bytes, offset);
+    List<GPOMutable> newMutables = (List<GPOMutable>)SerdeListGPOMutable.INSTANCE.deserializeObject(bytes, offset);
 
     Assert.assertEquals(mutables, newMutables);
     Assert.assertEquals(bytes.length, offset.intValue());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitiveTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitiveTest.java b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitiveTest.java
index 6445e05..ef6255d 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitiveTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitiveTest.java
@@ -20,13 +20,13 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Lists;
+
 public class SerdeListPrimitiveTest
 {
   @Test
@@ -35,11 +35,11 @@ public class SerdeListPrimitiveTest
     GPOByteArrayList bal = new GPOByteArrayList();
 
     List<Object> primitiveList = Lists.newArrayList();
-    primitiveList.add((Boolean) true);
-    primitiveList.add((Byte) ((byte) 5));
-    primitiveList.add((Short) ((short) 16000));
-    primitiveList.add((Integer) 25000000);
-    primitiveList.add((Long) 5000000000L);
+    primitiveList.add(true);
+    primitiveList.add(((byte)5));
+    primitiveList.add(((short)16000));
+    primitiveList.add(25000000);
+    primitiveList.add(5000000000L);
     primitiveList.add('a');
     primitiveList.add("tim is the coolest");
 
@@ -50,7 +50,8 @@ public class SerdeListPrimitiveTest
     bal.add(new byte[13]);
 
     @SuppressWarnings("unchecked")
-    List<Object> newPrimitiveList = (List<Object>) SerdeListPrimitive.INSTANCE.deserializeObject(bal.toByteArray(), new MutableInt(15));
+    List<Object> newPrimitiveList = (List<Object>)SerdeListPrimitive.INSTANCE.deserializeObject(bal.toByteArray(),
+        new MutableInt(15));
 
     Assert.assertEquals(primitiveList, newPrimitiveList);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListStringTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListStringTest.java b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListStringTest.java
index 356ef83..33d2286 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListStringTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/gpo/SerdeListStringTest.java
@@ -20,13 +20,13 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Lists;
+
 public class SerdeListStringTest
 {
   @Test
@@ -49,7 +49,7 @@ public class SerdeListStringTest
 
     @SuppressWarnings("unchecked")
     List<String> deserializedList =
-    (List<String>) sls.deserializeObject(gpoBytes.toByteArray(), intVals);
+        (List<String>)sls.deserializeObject(gpoBytes.toByteArray(), intVals);
 
     Assert.assertEquals(testList, deserializedList);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/MockQuery.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/MockQuery.java b/library/src/test/java/com/datatorrent/lib/appdata/query/MockQuery.java
index 208e7d5..eaa5a88 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/MockQuery.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/MockQuery.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.appdata.query;
 
-import com.datatorrent.lib.appdata.schemas.Query;
-
 import java.util.Map;
 
+import com.datatorrent.lib.appdata.schemas.Query;
+
 public class MockQuery extends Query
 {
   public static final String TYPE = "mockQuery";
@@ -31,21 +31,17 @@ public class MockQuery extends Query
     super(id, TYPE);
   }
 
-  public MockQuery(String id,
-                   Map<String, String> schemaKeys)
+  public MockQuery(String id, Map<String, String> schemaKeys)
   {
     super(id, TYPE, schemaKeys);
   }
 
-  public MockQuery(String id,
-                   long countdown)
+  public MockQuery(String id, long countdown)
   {
     super(id, TYPE, countdown);
   }
 
-  public MockQuery(String id,
-                   long countdown,
-                   Map<String, String> schemaKeys)
+  public MockQuery(String id, long countdown, Map<String, String> schemaKeys)
   {
     super(id, TYPE, countdown, schemaKeys);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/MockResult.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/MockResult.java b/library/src/test/java/com/datatorrent/lib/appdata/query/MockResult.java
index e19464e..50358b5 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/MockResult.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/MockResult.java
@@ -23,8 +23,8 @@ import com.datatorrent.lib.appdata.query.serde.MessageType;
 import com.datatorrent.lib.appdata.schemas.Query;
 import com.datatorrent.lib.appdata.schemas.Result;
 
-@MessageType(type=MockResult.TYPE)
-@MessageSerializerInfo(clazz=MockResultSerializer.class)
+@MessageType(type = MockResult.TYPE)
+@MessageSerializerInfo(clazz = MockResultSerializer.class)
 public class MockResult extends Result
 {
   public static final String TYPE = "dataResult";
@@ -34,10 +34,8 @@ public class MockResult extends Result
     super(query);
   }
 
-  public MockResult(Query query,
-                    long countdown)
+  public MockResult(Query query, long countdown)
   {
-    super(query,
-          countdown);
+    super(query, countdown);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/MockResultSerializer.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/MockResultSerializer.java b/library/src/test/java/com/datatorrent/lib/appdata/query/MockResultSerializer.java
index 873157b..b7d62ef 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/MockResultSerializer.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/MockResultSerializer.java
@@ -37,15 +37,14 @@ public class MockResultSerializer implements CustomMessageSerializer
   {
     try {
       return serializeHelper(message, resultFormatter);
-    }
-    catch(JSONException e) {
+    } catch (JSONException e) {
       throw new RuntimeException(e);
     }
   }
 
   private String serializeHelper(Message message, ResultFormatter resultFormatter) throws JSONException
   {
-    MockResult result = (MockResult) message;
+    MockResult result = (MockResult)message;
 
     JSONObject jo = new JSONObject();
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronousTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronousTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronousTest.java
index 3b39971..5b07dd7 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronousTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronousTest.java
@@ -30,13 +30,12 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.mutable.MutableLong;
 
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.lib.appdata.query.serde.MessageSerializerFactory;
 import com.datatorrent.lib.appdata.schemas.ResultFormatter;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.TestUtils;
 
-import com.datatorrent.api.DefaultOutputPort;
-
 public class QueryManagerAsynchronousTest
 {
   @Rule
@@ -55,8 +54,8 @@ public class QueryManagerAsynchronousTest
     {
       try {
         Thread.sleep(200);
-      }
-      catch(InterruptedException ex) {
+      } catch (InterruptedException ex) {
+        //noop
       }
       Thread.interrupted();
     }
@@ -77,17 +76,11 @@ public class QueryManagerAsynchronousTest
 
     MessageSerializerFactory msf = new MessageSerializerFactory(new ResultFormatter());
 
-    QueryManagerAsynchronous<MockQuery, Void, MutableLong, MockResult> queryManagerAsynch = new
-    QueryManagerAsynchronous<MockQuery, Void, MutableLong, MockResult>(outputPort,
-                                                                       queueManager,
-                                                                       new NOPQueryExecutor(waitMillisProb),
-                                                                       msf,
-                                                                       Thread.currentThread());
-
-    Thread producerThread = new Thread(new ProducerThread(queueManager,
-                                                          totalTuples,
-                                                          batchSize,
-                                                          waitMillisProb));
+    QueryManagerAsynchronous<MockQuery, Void, MutableLong, MockResult> queryManagerAsynch =
+        new QueryManagerAsynchronous<>(outputPort, queueManager, new NOPQueryExecutor(waitMillisProb), msf,
+        Thread.currentThread());
+
+    Thread producerThread = new Thread(new ProducerThread(queueManager, totalTuples, batchSize, waitMillisProb));
     producerThread.start();
     producerThread.setName("Producer Thread");
 
@@ -96,10 +89,9 @@ public class QueryManagerAsynchronousTest
     queryManagerAsynch.setup(null);
 
     int numWindows = 0;
-    for(;
-        sink.collectedTuples.size() < totalTuples
-        && ((System.currentTimeMillis() - startTime) < 60000)
-        ;numWindows++) {
+
+    for (; sink.collectedTuples.size() < totalTuples && ((System.currentTimeMillis() - startTime) < 60000);
+        numWindows++) {
       queryManagerAsynch.beginWindow(numWindows);
       Thread.sleep(100);
       queryManagerAsynch.endWindow();
@@ -110,8 +102,7 @@ public class QueryManagerAsynchronousTest
 
     try {
       Thread.sleep(1000);
-    }
-    catch(InterruptedException e) {
+    } catch (InterruptedException e) {
       //Do Nothing
     }
 
@@ -131,11 +122,10 @@ public class QueryManagerAsynchronousTest
     @Override
     public MockResult executeQuery(MockQuery query, Void metaQuery, MutableLong queueContext)
     {
-      if(rand.nextDouble() < waitMillisProb) {
+      if (rand.nextDouble() < waitMillisProb) {
         try {
           Thread.sleep(1);
-        }
-        catch(InterruptedException ex) {
+        } catch (InterruptedException ex) {
           throw new RuntimeException(ex);
         }
       }
@@ -152,10 +142,8 @@ public class QueryManagerAsynchronousTest
     private final double waitMillisProb;
     private final Random rand = new Random();
 
-    public ProducerThread(AppDataWindowEndQueueManager<MockQuery, Void> queueManager,
-                          int totalTuples,
-                          int batchSize,
-                          double waitMillisProb)
+    public ProducerThread(AppDataWindowEndQueueManager<MockQuery, Void> queueManager, int totalTuples, int batchSize,
+        double waitMillisProb)
     {
       this.queueManager = queueManager;
       this.totalTuples = totalTuples;
@@ -168,20 +156,14 @@ public class QueryManagerAsynchronousTest
     {
       int numLoops = totalTuples / batchSize;
 
-      for(int loopCounter = 0, tupleCounter = 0;
-          loopCounter < numLoops;
-          loopCounter++, tupleCounter++) {
-        for(int batchCounter = 0;
-            batchCounter < batchSize;
-            batchCounter++,
-            tupleCounter++) {
+      for (int loopCounter = 0, tupleCounter = 0; loopCounter < numLoops; loopCounter++, tupleCounter++) {
+        for (int batchCounter = 0; batchCounter < batchSize; batchCounter++, tupleCounter++) {
           queueManager.enqueue(new MockQuery(tupleCounter + ""), null, new MutableLong(1L));
 
-          if(rand.nextDouble() < waitMillisProb) {
+          if (rand.nextDouble() < waitMillisProb) {
             try {
               Thread.sleep(1);
-            }
-            catch(InterruptedException ex) {
+            } catch (InterruptedException ex) {
               throw new RuntimeException(ex);
             }
           }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronousTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronousTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronousTest.java
index 17df222..1c0e06c 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronousTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronousTest.java
@@ -20,11 +20,11 @@ package com.datatorrent.lib.appdata.query;
 
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 import com.datatorrent.lib.appdata.schemas.Query;
 import com.datatorrent.lib.appdata.schemas.Result;
 
@@ -35,14 +35,13 @@ public class QueryManagerSynchronousTest
   {
     final int numQueries = 3;
 
-    QueryManagerSynchronous<Query, Void, Void, Result> queryProcessor = QueryManagerSynchronous.newInstance(new SimpleQueryComputer());
+    QueryManagerSynchronous<Query, Void, Void, Result> queryProcessor = QueryManagerSynchronous.newInstance(
+        new SimpleQueryComputer());
 
     queryProcessor.setup(null);
     queryProcessor.beginWindow(0);
 
-    for(int qc = 0;
-        qc < numQueries;
-        qc++) {
+    for (int qc = 0; qc < numQueries; qc++) {
       Query query = new MockQuery(Integer.toString(qc));
       queryProcessor.enqueue(query, null, null);
     }
@@ -50,7 +49,7 @@ public class QueryManagerSynchronousTest
     Result result;
     List<Result> results = Lists.newArrayList();
 
-    while((result = queryProcessor.process()) != null) {
+    while ((result = queryProcessor.process()) != null) {
       results.add(result);
     }
 
@@ -59,9 +58,7 @@ public class QueryManagerSynchronousTest
 
     Assert.assertEquals("Sizes must match.", numQueries, results.size());
 
-    for(int rc = 0;
-        rc < results.size();
-        rc++) {
+    for (int rc = 0; rc < results.size(); rc++) {
       result = results.get(rc);
       Assert.assertEquals("Ids must match.", Integer.toString(rc), result.getId());
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/QueueUtilsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/QueueUtilsTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/QueueUtilsTest.java
index 62b7203..8202454 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/QueueUtilsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/QueueUtilsTest.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.lib.appdata.query;
 
-import com.google.common.base.Preconditions;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.base.Preconditions;
+
 import com.datatorrent.lib.appdata.QueueUtils.ConditionBarrier;
 
 public class QueueUtilsTest

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/SimpleDoneQueryQueueManagerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/SimpleDoneQueryQueueManagerTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/SimpleDoneQueryQueueManagerTest.java
index 6ddadf1..ffa7eba 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/SimpleDoneQueryQueueManagerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/SimpleDoneQueryQueueManagerTest.java
@@ -18,8 +18,6 @@
  */
 package com.datatorrent.lib.appdata.query;
 
-import com.google.common.base.Preconditions;
-
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -27,6 +25,8 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.mutable.MutableBoolean;
 
+import com.google.common.base.Preconditions;
+
 import com.datatorrent.lib.appdata.ThreadUtils.ExceptionSaverExceptionHandler;
 import com.datatorrent.lib.appdata.schemas.Query;
 
@@ -435,10 +435,10 @@ public class SimpleDoneQueryQueueManagerTest
     thread.stop();
   }
 
-  private Thread testBlockingNoStop(SimpleDoneQueueManager<Query, Void> sdqqm,
-                                    ExceptionSaverExceptionHandler eseh) throws InterruptedException
+  private Thread testBlockingNoStop(SimpleDoneQueueManager<Query, Void> sdqqm, ExceptionSaverExceptionHandler eseh)
+      throws InterruptedException
   {
-    Thread thread = new Thread(new BlockedThread<Query, Void, MutableBoolean>(sdqqm));
+    Thread thread = new Thread(new BlockedThread<>(sdqqm));
     thread.setUncaughtExceptionHandler(eseh);
     thread.start();
     Thread.sleep(100);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/WEQueryQueueManagerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/WEQueryQueueManagerTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/WEQueryQueueManagerTest.java
index 7b2c610..d275a61 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/WEQueryQueueManagerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/WEQueryQueueManagerTest.java
@@ -30,7 +30,7 @@ public class WEQueryQueueManagerTest
   @Test
   public void testSimpleRemoveEmpty()
   {
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
@@ -50,7 +50,7 @@ public class WEQueryQueueManagerTest
   @Test
   public void testSimpleAddOneRemove()
   {
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
@@ -72,7 +72,7 @@ public class WEQueryQueueManagerTest
   @Test
   public void testSimpleAddRemove2()
   {
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
@@ -104,7 +104,7 @@ public class WEQueryQueueManagerTest
   @Test
   public void testSimpleAddAfterStarted()
   {
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
@@ -139,14 +139,12 @@ public class WEQueryQueueManagerTest
   {
     final int numQueries = 3;
 
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
 
-    for(int qc = 0;
-        qc < numQueries;
-        qc++) {
+    for (int qc = 0; qc < numQueries; qc++) {
       Query query = new MockQuery(Integer.toString(qc));
       wqqm.enqueue(query, null, new MutableLong(3L));
     }
@@ -163,17 +161,11 @@ public class WEQueryQueueManagerTest
     {
       int qc = 0;
 
-      for(QueryBundle<Query, Void, MutableLong> tquery;
-          (tquery = wqqm.dequeue()) != null;
-          qc++) {
-        Assert.assertEquals("Query ids must equal.",
-                            Integer.toString(qc),
-                            tquery.getQuery().getId());
+      for (QueryBundle<Query, Void, MutableLong> tquery; (tquery = wqqm.dequeue()) != null; qc++) {
+        Assert.assertEquals("Query ids must equal.", Integer.toString(qc), tquery.getQuery().getId());
       }
 
-      Assert.assertEquals("The number of queries must match.",
-                          numQueries,
-                          qc);
+      Assert.assertEquals("The number of queries must match.", numQueries, qc);
     }
 
     wqqm.endWindow();
@@ -185,14 +177,12 @@ public class WEQueryQueueManagerTest
   {
     final int numQueries = 3;
 
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
 
-    for(int qc = 0;
-        qc < numQueries;
-        qc++) {
+    for (int qc = 0; qc < numQueries; qc++) {
       Query query = new MockQuery(Integer.toString(qc));
       wqqm.enqueue(query, null, new MutableLong(2L));
     }
@@ -203,9 +193,7 @@ public class WEQueryQueueManagerTest
     {
       int qc = 0;
 
-      for(QueryBundle<Query, Void, MutableLong> qb;
-          (qb = wqqm.dequeue()) != null;
-          qc++) {
+      for (QueryBundle<Query, Void, MutableLong> qb; (qb = wqqm.dequeue()) != null; qc++) {
         Query query = qb.getQuery();
         Assert.assertEquals("Query ids must equal.", Integer.toString(qc), query.getId());
       }
@@ -226,22 +214,18 @@ public class WEQueryQueueManagerTest
   public void testMixedExpiration()
   {
     final int numQueries = 3;
-    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<Query, Void>();
+    WindowEndQueueManager<Query, Void> wqqm = new WindowEndQueueManager<>();
 
     wqqm.setup(null);
     wqqm.beginWindow(0);
 
     {
-      for(int qc = 0;
-          qc < numQueries;
-          qc++) {
+      for (int qc = 0; qc < numQueries; qc++) {
         Query query = new MockQuery(Integer.toString(qc));
         wqqm.enqueue(query, null, new MutableLong(2L));
       }
 
-      for(int qc = 0;
-          qc < numQueries;
-          qc++) {
+      for (int qc = 0; qc < numQueries; qc++) {
         Query query = new MockQuery(Integer.toString(qc + numQueries));
         wqqm.enqueue(query, null, new MutableLong(3L));
       }
@@ -253,9 +237,7 @@ public class WEQueryQueueManagerTest
     {
       int qc = 0;
 
-      for(QueryBundle<Query, Void, MutableLong> qb;
-          (qb = wqqm.dequeue()) != null;
-          qc++) {
+      for (QueryBundle<Query, Void, MutableLong> qb; (qb = wqqm.dequeue()) != null; qc++) {
         Query query = qb.getQuery();
         Assert.assertEquals("Query ids must equal.", Integer.toString(qc), query.getId());
       }
@@ -269,9 +251,7 @@ public class WEQueryQueueManagerTest
     {
       int qc = 0;
 
-      for(QueryBundle<Query, Void, MutableLong> qb;
-          (qb = wqqm.dequeue()) != null;
-          qc++) {
+      for (QueryBundle<Query, Void, MutableLong> qb; (qb = wqqm.dequeue()) != null; qc++) {
         Query query = qb.getQuery();
         Assert.assertEquals("Query ids must equal.", Integer.toString(qc + numQueries), query.getId());
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/WindowBoundedServiceTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/WindowBoundedServiceTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/WindowBoundedServiceTest.java
index 3fb3780..63b7692 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/WindowBoundedServiceTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/WindowBoundedServiceTest.java
@@ -35,8 +35,7 @@ public class WindowBoundedServiceTest
   {
     CounterRunnable counterRunnable = new CounterRunnable();
 
-    WindowBoundedService wbs = new WindowBoundedService(1,
-                                                        counterRunnable);
+    WindowBoundedService wbs = new WindowBoundedService(1, counterRunnable);
     wbs.setup(null);
     Thread.sleep(500);
     Assert.assertEquals(0, counterRunnable.getCounter());
@@ -54,8 +53,7 @@ public class WindowBoundedServiceTest
   {
     CounterRunnable counterRunnable = new CounterRunnable();
 
-    WindowBoundedService wbs = new WindowBoundedService(1,
-                                                        counterRunnable);
+    WindowBoundedService wbs = new WindowBoundedService(1, counterRunnable);
     wbs.setup(null);
     wbs.beginWindow(0);
     Thread.sleep(500);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactoryTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactoryTest.java b/library/src/test/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactoryTest.java
index 36092e1..ca89789 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactoryTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactoryTest.java
@@ -41,8 +41,7 @@ public class MessageDeserializerFactoryTest
 
     try {
       qdf.deserialize(malformed);
-    }
-    catch(IOException e) {
+    } catch (IOException e) {
       exception = true;
     }
 
@@ -62,8 +61,7 @@ public class MessageDeserializerFactoryTest
 
     try {
       data = qdf.deserialize(unsupportedQuery);
-    }
-    catch(IOException e) {
+    } catch (IOException e) {
       exception = true;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotDeserializerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotDeserializerTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotDeserializerTest.java
index 17be67b..7f877aa 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotDeserializerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotDeserializerTest.java
@@ -19,18 +19,17 @@
 package com.datatorrent.lib.appdata.schemas;
 
 import java.io.IOException;
-
 import java.util.Map;
 import java.util.Set;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestWatcher;
 
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
 import com.datatorrent.lib.appdata.query.serde.DataQuerySnapshotDeserializer;
 import com.datatorrent.lib.appdata.query.serde.MessageDeserializerFactory;
 
@@ -61,7 +60,7 @@ public class DataQuerySnapshotDeserializerTest
 
     String queryJSON = SchemaUtils.jarResourceFileToString("snapshotquery_deserialize1.json");
 
-    DataQuerySnapshot gQuery = (DataQuerySnapshot) deserializer.deserialize(queryJSON, DataQuerySnapshot.class, null);
+    DataQuerySnapshot gQuery = (DataQuerySnapshot)deserializer.deserialize(queryJSON, DataQuerySnapshot.class, null);
 
     Assert.assertEquals("The id must equal.", "1", gQuery.getId());
     Assert.assertEquals("The type must equal.", DataQuerySnapshot.TYPE, gQuery.getType());
@@ -83,7 +82,7 @@ public class DataQuerySnapshotDeserializerTest
 
     String queryJSON = SchemaUtils.jarResourceFileToString("snapshotquery_deserialize2.json");
 
-    DataQuerySnapshot gQuery = (DataQuerySnapshot) deserializer.deserialize(queryJSON, DataQuerySnapshot.class, null);
+    DataQuerySnapshot gQuery = (DataQuerySnapshot)deserializer.deserialize(queryJSON, DataQuerySnapshot.class, null);
 
     Assert.assertEquals("The id must equal.", "1", gQuery.getId());
     Assert.assertEquals("The type must equal.", DataQuerySnapshot.TYPE, gQuery.getType());
@@ -98,7 +97,7 @@ public class DataQuerySnapshotDeserializerTest
   public void noFieldsSpecified() throws Exception
   {
     String snapshotQuery = SchemaUtils.jarResourceFileToString("snapshotquery_deserialize3.json");
-    DataQuerySnapshot query = (DataQuerySnapshot) testMeta.queryDeserializerFactory.deserialize(snapshotQuery);
+    DataQuerySnapshot query = (DataQuerySnapshot)testMeta.queryDeserializerFactory.deserialize(snapshotQuery);
 
     Set<String> expectedFields = Sets.newHashSet("boolField", "intField", "doubleField");
 
@@ -170,7 +169,7 @@ public class DataQuerySnapshotDeserializerTest
   private void testValid(String validResourceJSON) throws Exception
   {
     String snapshotQuery = SchemaUtils.jarResourceFileToString(validResourceJSON);
-    DataQuerySnapshot query = (DataQuerySnapshot) testMeta.queryDeserializerFactory.deserialize(snapshotQuery);
+    DataQuerySnapshot query = (DataQuerySnapshot)testMeta.queryDeserializerFactory.deserialize(snapshotQuery);
     Assert.assertNotNull(query);
   }
 }



[04/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/LogicalCompareTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/LogicalCompareTest.java b/library/src/test/java/com/datatorrent/lib/math/LogicalCompareTest.java
index 6ecb398..9fd2526 100644
--- a/library/src/test/java/com/datatorrent/lib/math/LogicalCompareTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/LogicalCompareTest.java
@@ -29,76 +29,76 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class LogicalCompareTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   * Test operator logic emits correct results.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-	public void testNodeProcessing()
-	{
-		LogicalCompare<Integer> oper = new LogicalCompare<Integer>()
-		{
-		};
-		CollectorTestSink eSink = new CollectorTestSink();
-		CollectorTestSink neSink = new CollectorTestSink();
-		CollectorTestSink gtSink = new CollectorTestSink();
-		CollectorTestSink gteSink = new CollectorTestSink();
-		CollectorTestSink ltSink = new CollectorTestSink();
-		CollectorTestSink lteSink = new CollectorTestSink();
+  public void testNodeProcessing()
+  {
+    LogicalCompare<Integer> oper = new LogicalCompare<Integer>()
+    {
+    };
+    CollectorTestSink eSink = new CollectorTestSink();
+    CollectorTestSink neSink = new CollectorTestSink();
+    CollectorTestSink gtSink = new CollectorTestSink();
+    CollectorTestSink gteSink = new CollectorTestSink();
+    CollectorTestSink ltSink = new CollectorTestSink();
+    CollectorTestSink lteSink = new CollectorTestSink();
 
-		oper.equalTo.setSink(eSink);
-		oper.notEqualTo.setSink(neSink);
-		oper.greaterThan.setSink(gtSink);
-		oper.greaterThanOrEqualTo.setSink(gteSink);
-		oper.lessThan.setSink(ltSink);
-		oper.lessThanOrEqualTo.setSink(lteSink);
+    oper.equalTo.setSink(eSink);
+    oper.notEqualTo.setSink(neSink);
+    oper.greaterThan.setSink(gtSink);
+    oper.greaterThanOrEqualTo.setSink(gteSink);
+    oper.lessThan.setSink(ltSink);
+    oper.lessThanOrEqualTo.setSink(lteSink);
 
-		Pair<Integer, Integer> gtuple = new Pair<Integer, Integer>(2, 1);
-		Pair<Integer, Integer> etuple = new Pair<Integer, Integer>(2, 2);
-		Pair<Integer, Integer> ltuple = new Pair<Integer, Integer>(2, 3);
+    Pair<Integer, Integer> gtuple = new Pair<Integer, Integer>(2, 1);
+    Pair<Integer, Integer> etuple = new Pair<Integer, Integer>(2, 2);
+    Pair<Integer, Integer> ltuple = new Pair<Integer, Integer>(2, 3);
 
-		oper.beginWindow(0); //
+    oper.beginWindow(0); //
 
-		oper.input.process(gtuple);
-		oper.input.process(etuple);
-		oper.input.process(ltuple);
+    oper.input.process(gtuple);
+    oper.input.process(etuple);
+    oper.input.process(ltuple);
 
-		oper.endWindow(); //
+    oper.endWindow(); //
 
-		Assert.assertEquals("number emitted tuples", 1,
-				eSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				eSink.collectedTuples.get(0).equals(etuple), true);
+    Assert.assertEquals("number emitted tuples", 1,
+        eSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        eSink.collectedTuples.get(0).equals(etuple), true);
 
-		Assert.assertEquals("number emitted tuples", 2,
-				neSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				neSink.collectedTuples.get(0).equals(gtuple), true);
-		Assert.assertEquals("tuples were",
-				neSink.collectedTuples.get(1).equals(ltuple), true);
+    Assert.assertEquals("number emitted tuples", 2,
+        neSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        neSink.collectedTuples.get(0).equals(gtuple), true);
+    Assert.assertEquals("tuples were",
+        neSink.collectedTuples.get(1).equals(ltuple), true);
 
-		Assert.assertEquals("number emitted tuples", 1,
-				gtSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				gtSink.collectedTuples.get(0).equals(gtuple), true);
+    Assert.assertEquals("number emitted tuples", 1,
+        gtSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        gtSink.collectedTuples.get(0).equals(gtuple), true);
 
-		Assert.assertEquals("number emitted tuples", 2,
-				gteSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				gteSink.collectedTuples.get(0).equals(gtuple), true);
-		Assert.assertEquals("tuples were",
-				gteSink.collectedTuples.get(1).equals(etuple), true);
+    Assert.assertEquals("number emitted tuples", 2,
+        gteSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        gteSink.collectedTuples.get(0).equals(gtuple), true);
+    Assert.assertEquals("tuples were",
+        gteSink.collectedTuples.get(1).equals(etuple), true);
 
-		Assert.assertEquals("number emitted tuples", 1,
-				ltSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				ltSink.collectedTuples.get(0).equals(ltuple), true);
+    Assert.assertEquals("number emitted tuples", 1,
+        ltSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        ltSink.collectedTuples.get(0).equals(ltuple), true);
 
-		Assert.assertEquals("number emitted tuples", 2,
-				lteSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				lteSink.collectedTuples.get(0).equals(etuple), true);
-		Assert.assertEquals("tuples were",
-				lteSink.collectedTuples.get(1).equals(ltuple), true);
-	}
+    Assert.assertEquals("number emitted tuples", 2,
+        lteSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        lteSink.collectedTuples.get(0).equals(etuple), true);
+    Assert.assertEquals("tuples were",
+        lteSink.collectedTuples.get(1).equals(ltuple), true);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/LogicalCompareToConstantTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/LogicalCompareToConstantTest.java b/library/src/test/java/com/datatorrent/lib/math/LogicalCompareToConstantTest.java
index c813629..df7ef2d 100644
--- a/library/src/test/java/com/datatorrent/lib/math/LogicalCompareToConstantTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/LogicalCompareToConstantTest.java
@@ -32,72 +32,72 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class LogicalCompareToConstantTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   * Test operator logic emits correct results.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-	public void testNodeProcessing()
-	{
-		LogicalCompareToConstant<Integer> oper = new LogicalCompareToConstant<Integer>()
-		{
-		};
-		CollectorTestSink eSink = new CollectorTestSink();
-		CollectorTestSink neSink = new CollectorTestSink();
-		CollectorTestSink gtSink = new CollectorTestSink();
-		CollectorTestSink gteSink = new CollectorTestSink();
-		CollectorTestSink ltSink = new CollectorTestSink();
-		CollectorTestSink lteSink = new CollectorTestSink();
+  public void testNodeProcessing()
+  {
+    LogicalCompareToConstant<Integer> oper = new LogicalCompareToConstant<Integer>()
+    {
+    };
+    CollectorTestSink eSink = new CollectorTestSink();
+    CollectorTestSink neSink = new CollectorTestSink();
+    CollectorTestSink gtSink = new CollectorTestSink();
+    CollectorTestSink gteSink = new CollectorTestSink();
+    CollectorTestSink ltSink = new CollectorTestSink();
+    CollectorTestSink lteSink = new CollectorTestSink();
 
-		oper.equalTo.setSink(eSink);
-		oper.notEqualTo.setSink(neSink);
-		oper.greaterThan.setSink(gtSink);
-		oper.greaterThanOrEqualTo.setSink(gteSink);
-		oper.lessThan.setSink(ltSink);
-		oper.lessThanOrEqualTo.setSink(lteSink);
-		oper.setConstant(2);
+    oper.equalTo.setSink(eSink);
+    oper.notEqualTo.setSink(neSink);
+    oper.greaterThan.setSink(gtSink);
+    oper.greaterThanOrEqualTo.setSink(gteSink);
+    oper.lessThan.setSink(ltSink);
+    oper.lessThanOrEqualTo.setSink(lteSink);
+    oper.setConstant(2);
 
-		oper.beginWindow(0); //
-		oper.input.process(1);
-		oper.input.process(2);
-		oper.input.process(3);
+    oper.beginWindow(0); //
+    oper.input.process(1);
+    oper.input.process(2);
+    oper.input.process(3);
 
-		oper.endWindow(); //
+    oper.endWindow(); //
 
-		Assert.assertEquals("number emitted tuples", 1,
-				eSink.collectedTuples.size());
-		Assert.assertEquals("tuples were", eSink.collectedTuples.get(0).equals(2),
-				true);
+    Assert.assertEquals("number emitted tuples", 1,
+        eSink.collectedTuples.size());
+    Assert.assertEquals("tuples were", eSink.collectedTuples.get(0).equals(2),
+        true);
 
-		Assert.assertEquals("number emitted tuples", 2,
-				neSink.collectedTuples.size());
-		Assert.assertEquals("tuples were", neSink.collectedTuples.get(0).equals(1),
-				true);
-		Assert.assertEquals("tuples were", neSink.collectedTuples.get(1).equals(3),
-				true);
+    Assert.assertEquals("number emitted tuples", 2,
+        neSink.collectedTuples.size());
+    Assert.assertEquals("tuples were", neSink.collectedTuples.get(0).equals(1),
+        true);
+    Assert.assertEquals("tuples were", neSink.collectedTuples.get(1).equals(3),
+        true);
 
-		Assert.assertEquals("number emitted tuples", 1,
-				gtSink.collectedTuples.size());
-		Assert.assertEquals("tuples were", gtSink.collectedTuples.get(0).equals(1),
-				true);
+    Assert.assertEquals("number emitted tuples", 1,
+        gtSink.collectedTuples.size());
+    Assert.assertEquals("tuples were", gtSink.collectedTuples.get(0).equals(1),
+        true);
 
-		Assert.assertEquals("number emitted tuples", 2,
-				gteSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				gteSink.collectedTuples.get(0).equals(1), true);
-		Assert.assertEquals("tuples were",
-				gteSink.collectedTuples.get(1).equals(2), true);
+    Assert.assertEquals("number emitted tuples", 2,
+        gteSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        gteSink.collectedTuples.get(0).equals(1), true);
+    Assert.assertEquals("tuples were",
+        gteSink.collectedTuples.get(1).equals(2), true);
 
-		Assert.assertEquals("number emitted tuples", 1,
-				ltSink.collectedTuples.size());
-		Assert.assertEquals("tuples were", ltSink.collectedTuples.get(0).equals(3),
-				true);
+    Assert.assertEquals("number emitted tuples", 1,
+        ltSink.collectedTuples.size());
+    Assert.assertEquals("tuples were", ltSink.collectedTuples.get(0).equals(3),
+        true);
 
-		Assert.assertEquals("number emitted tuples", 2,
-				lteSink.collectedTuples.size());
-		Assert.assertEquals("tuples were",
-				lteSink.collectedTuples.get(0).equals(2), true);
-		Assert.assertEquals("tuples were",
-				lteSink.collectedTuples.get(1).equals(3), true);
-	}
+    Assert.assertEquals("number emitted tuples", 2,
+        lteSink.collectedTuples.size());
+    Assert.assertEquals("tuples were",
+        lteSink.collectedTuples.get(0).equals(2), true);
+    Assert.assertEquals("tuples were",
+        lteSink.collectedTuples.get(1).equals(3), true);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MarginKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MarginKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/MarginKeyValTest.java
index 884e981..365df7a 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MarginKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MarginKeyValTest.java
@@ -29,57 +29,53 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class MarginKeyValTest
 {
-	/**
-	 * Test node logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new MarginKeyVal<String, Integer>());
-		testNodeProcessingSchema(new MarginKeyVal<String, Double>());
-		testNodeProcessingSchema(new MarginKeyVal<String, Float>());
-		testNodeProcessingSchema(new MarginKeyVal<String, Short>());
-		testNodeProcessingSchema(new MarginKeyVal<String, Long>());
-	}
+  /**
+   * Test node logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new MarginKeyVal<String, Integer>());
+    testNodeProcessingSchema(new MarginKeyVal<String, Double>());
+    testNodeProcessingSchema(new MarginKeyVal<String, Float>());
+    testNodeProcessingSchema(new MarginKeyVal<String, Short>());
+    testNodeProcessingSchema(new MarginKeyVal<String, Long>());
+  }
 
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testNodeProcessingSchema(MarginKeyVal oper)
-	{
-		CollectorTestSink marginSink = new CollectorTestSink();
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testNodeProcessingSchema(MarginKeyVal oper)
+  {
+    CollectorTestSink marginSink = new CollectorTestSink();
 
-		oper.margin.setSink(marginSink);
+    oper.margin.setSink(marginSink);
 
-		oper.beginWindow(0);
-		oper.numerator.process(new KeyValPair("a", 2));
-		oper.numerator.process(new KeyValPair("b", 20));
-		oper.numerator.process(new KeyValPair("c", 1000));
+    oper.beginWindow(0);
+    oper.numerator.process(new KeyValPair("a", 2));
+    oper.numerator.process(new KeyValPair("b", 20));
+    oper.numerator.process(new KeyValPair("c", 1000));
 
-		oper.denominator.process(new KeyValPair("a", 2));
-		oper.denominator.process(new KeyValPair("b", 40));
-		oper.denominator.process(new KeyValPair("c", 500));
-		oper.endWindow();
+    oper.denominator.process(new KeyValPair("a", 2));
+    oper.denominator.process(new KeyValPair("b", 40));
+    oper.denominator.process(new KeyValPair("c", 500));
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", 3,
-				marginSink.collectedTuples.size());
-		for (int i = 0; i < marginSink.collectedTuples.size(); i++) {
-			if ("a".equals(((KeyValPair<String, Number>) marginSink.collectedTuples
-					.get(i)).getKey())) {
-				Assert.assertEquals("emitted value for 'a' was ", new Double(0),
-						((KeyValPair<String, Number>) marginSink.collectedTuples.get(i))
-								.getValue().doubleValue(), 0);
-			}
-			if ("b".equals(((KeyValPair<String, Number>) marginSink.collectedTuples
-					.get(i)).getKey())) {
-				Assert.assertEquals("emitted value for 'b' was ", new Double(0.5),
-						((KeyValPair<String, Number>) marginSink.collectedTuples.get(i))
-								.getValue().doubleValue(), 0);
-			}
-			if ("c".equals(((KeyValPair<String, Number>) marginSink.collectedTuples
-					.get(i)).getKey())) {
-				Assert.assertEquals("emitted value for 'c' was ", new Double(-1),
-						((KeyValPair<String, Number>) marginSink.collectedTuples.get(i))
-								.getValue().doubleValue(), 0);
-			}
-		}
-	}
+    Assert.assertEquals("number emitted tuples", 3,
+        marginSink.collectedTuples.size());
+    for (int i = 0; i < marginSink.collectedTuples.size(); i++) {
+      if ("a".equals(((KeyValPair<String, Number>)marginSink.collectedTuples.get(i)).getKey())) {
+        Assert.assertEquals("emitted value for 'a' was ", 0d,
+            ((KeyValPair<String, Number>)marginSink.collectedTuples.get(i)).getValue().doubleValue(), 0);
+      }
+      if ("b".equals(((KeyValPair<String, Number>)marginSink.collectedTuples
+          .get(i)).getKey())) {
+        Assert.assertEquals("emitted value for 'b' was ", 0.5,
+            ((KeyValPair<String, Number>)marginSink.collectedTuples.get(i)).getValue().doubleValue(), 0);
+      }
+      if ("c".equals(((KeyValPair<String, Number>)marginSink.collectedTuples
+          .get(i)).getKey())) {
+        Assert.assertEquals("emitted value for 'c' was ", (double)-1,
+            ((KeyValPair<String, Number>)marginSink.collectedTuples.get(i)).getValue().doubleValue(), 0);
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java b/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
index e16f3dc..07a378c 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
@@ -36,62 +36,58 @@ import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
  */
 public class MarginMapTest
 {
-	private static Logger LOG = LoggerFactory.getLogger(MarginMapTest.class);
+  private static Logger LOG = LoggerFactory.getLogger(MarginMapTest.class);
 
-	/**
-	 * Test node logic emits correct results
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new MarginMap<String, Integer>());
-		testNodeProcessingSchema(new MarginMap<String, Double>());
-		testNodeProcessingSchema(new MarginMap<String, Float>());
-		testNodeProcessingSchema(new MarginMap<String, Short>());
-		testNodeProcessingSchema(new MarginMap<String, Long>());
-	}
+  /**
+   * Test node logic emits correct results
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new MarginMap<String, Integer>());
+    testNodeProcessingSchema(new MarginMap<String, Double>());
+    testNodeProcessingSchema(new MarginMap<String, Float>());
+    testNodeProcessingSchema(new MarginMap<String, Short>());
+    testNodeProcessingSchema(new MarginMap<String, Long>());
+  }
 
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testNodeProcessingSchema(MarginMap oper)
-	{
-		CountAndLastTupleTestSink marginSink = new CountAndLastTupleTestSink();
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testNodeProcessingSchema(MarginMap oper)
+  {
+    CountAndLastTupleTestSink marginSink = new CountAndLastTupleTestSink();
 
-		oper.margin.setSink(marginSink);
+    oper.margin.setSink(marginSink);
 
-		oper.beginWindow(0);
-		HashMap<String, Number> input = new HashMap<String, Number>();
-		input.put("a", 2);
-		input.put("b", 20);
-		input.put("c", 1000);
-		oper.numerator.process(input);
+    oper.beginWindow(0);
+    HashMap<String, Number> input = new HashMap<String, Number>();
+    input.put("a", 2);
+    input.put("b", 20);
+    input.put("c", 1000);
+    oper.numerator.process(input);
 
-		input.clear();
-		input.put("a", 2);
-		input.put("b", 40);
-		input.put("c", 500);
-		oper.denominator.process(input);
+    input.clear();
+    input.put("a", 2);
+    input.put("b", 40);
+    input.put("c", 500);
+    oper.denominator.process(input);
 
-		oper.endWindow();
+    oper.endWindow();
 
-		// One for each key
-		Assert.assertEquals("number emitted tuples", 1, marginSink.count);
+    // One for each key
+    Assert.assertEquals("number emitted tuples", 1, marginSink.count);
 
-		HashMap<String, Number> output = (HashMap<String, Number>) marginSink.tuple;
-		for (Map.Entry<String, Number> e : output.entrySet()) {
-			LOG.debug(String.format("Key, value is %s,%f", e.getKey(), e.getValue()
-					.doubleValue()));
-			if (e.getKey().equals("a")) {
-				Assert.assertEquals("emitted value for 'a' was ", new Double(0), e
-						.getValue().doubleValue(), 0);
-			} else if (e.getKey().equals("b")) {
-				Assert.assertEquals("emitted tuple for 'b' was ", new Double(0.5), e
-						.getValue().doubleValue(), 0);
-			} else if (e.getKey().equals("c")) {
-				Assert.assertEquals("emitted tuple for 'c' was ", new Double(-1.0), e
-						.getValue().doubleValue(), 0);
-			} else {
-				LOG.debug(String.format("key was %s", e.getKey()));
-			}
-		}
-	}
+    HashMap<String, Number> output = (HashMap<String, Number>)marginSink.tuple;
+    for (Map.Entry<String, Number> e : output.entrySet()) {
+      LOG.debug(String.format("Key, value is %s,%f", e.getKey(), e.getValue().doubleValue()));
+      if (e.getKey().equals("a")) {
+        Assert.assertEquals("emitted value for 'a' was ", 0d, e.getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("b")) {
+        Assert.assertEquals("emitted tuple for 'b' was ", 0.5, e.getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("c")) {
+        Assert.assertEquals("emitted tuple for 'c' was ", -1.0, e.getValue().doubleValue(), 0);
+      } else {
+        LOG.debug(String.format("key was %s", e.getKey()));
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MarginTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MarginTest.java b/library/src/test/java/com/datatorrent/lib/math/MarginTest.java
index 899a7ee..4ae6fe8 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MarginTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MarginTest.java
@@ -45,7 +45,7 @@ public class MarginTest
   }
 
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	public void testNodeProcessingSchema(Margin oper)
+  public void testNodeProcessingSchema(Margin oper)
   {
     CountAndLastTupleTestSink marginSink = new CountAndLastTupleTestSink();
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MaxKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MaxKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/MaxKeyValTest.java
index a532da3..55cf5b4 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MaxKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MaxKeyValTest.java
@@ -23,11 +23,10 @@ import java.util.ArrayList;
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
-
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
 import com.datatorrent.lib.util.KeyValPair;
 
@@ -66,28 +65,24 @@ public class MaxKeyValTest
     int numtuples = 10000;
     if (type.equals("integer")) {
       for (int i = 0; i < numtuples; i++) {
-        oper.data.process(new KeyValPair("a", new Integer(i)));
+        oper.data.process(new KeyValPair("a", i));
       }
-    }
-    else if (type.equals("double")) {
+    } else if (type.equals("double")) {
       for (int i = 0; i < numtuples; i++) {
-        oper.data.process(new KeyValPair("a", new Double(i)));
+        oper.data.process(new KeyValPair("a", (double)i));
       }
-    }
-    else if (type.equals("long")) {
+    } else if (type.equals("long")) {
       for (int i = 0; i < numtuples; i++) {
-        oper.data.process(new KeyValPair("a", new Long(i)));
+        oper.data.process(new KeyValPair("a", (long)i));
       }
-    }
-    else if (type.equals("short")) {
+    } else if (type.equals("short")) {
       int count = numtuples / 1000; // cannot cross 64K
       for (short j = 0; j < count; j++) {
-        oper.data.process(new KeyValPair("a", new Short(j)));
+        oper.data.process(new KeyValPair("a", j));
       }
-    }
-    else if (type.equals("float")) {
+    } else if (type.equals("float")) {
       for (int i = 0; i < numtuples; i++) {
-        oper.data.process(new KeyValPair("a", new Float(i)));
+        oper.data.process(new KeyValPair("a", (float)i));
       }
     }
 
@@ -97,8 +92,7 @@ public class MaxKeyValTest
     Number val = ((KeyValPair<String, Number>)maxSink.tuple).getValue().intValue();
     if (type.equals("short")) {
       Assert.assertEquals("emitted max value was ", (new Double(numtuples / 1000 - 1)).intValue(), val);
-    }
-    else {
+    } else {
       Assert.assertEquals("emitted max value was ", (new Double(numtuples - 1)).intValue(), val);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MaxTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MaxTest.java b/library/src/test/java/com/datatorrent/lib/math/MaxTest.java
index 87037b4..a294c26 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MaxTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MaxTest.java
@@ -41,9 +41,9 @@ public class MaxTest
 
     oper.beginWindow(0); //
 
-    Double a = new Double(2.0);
-    Double b = new Double(20.0);
-    Double c = new Double(1000.0);
+    Double a = 2.0;
+    Double b = 20.0;
+    Double c = 1000.0;
 
     oper.data.process(a);
     oper.data.process(b);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MinKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MinKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/MinKeyValTest.java
index be6461d..6d9371c 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MinKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MinKeyValTest.java
@@ -23,11 +23,10 @@ import java.util.ArrayList;
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
-
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
 import com.datatorrent.lib.util.KeyValPair;
 
@@ -36,105 +35,104 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class MinKeyValTest
 {
-	/**
-	 * Test functional logic
-	 */
-	@Test
-	public void testNodeProcessing()
-	{
-		testSchemaNodeProcessing(new MinKeyVal<String, Integer>(), "integer");
-		testSchemaNodeProcessing(new MinKeyVal<String, Double>(), "double");
-		testSchemaNodeProcessing(new MinKeyVal<String, Long>(), "long");
-		testSchemaNodeProcessing(new MinKeyVal<String, Short>(), "short");
-		testSchemaNodeProcessing(new MinKeyVal<String, Float>(), "float");
-	}
+  /**
+   * Test functional logic
+   */
+  @Test
+  public void testNodeProcessing()
+  {
+    testSchemaNodeProcessing(new MinKeyVal<String, Integer>(), "integer");
+    testSchemaNodeProcessing(new MinKeyVal<String, Double>(), "double");
+    testSchemaNodeProcessing(new MinKeyVal<String, Long>(), "long");
+    testSchemaNodeProcessing(new MinKeyVal<String, Short>(), "short");
+    testSchemaNodeProcessing(new MinKeyVal<String, Float>(), "float");
+  }
 
-	/**
-	 * Test operator logic emits correct results for each schema.
-	 *
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testSchemaNodeProcessing(MinKeyVal oper, String type)
-	{
-		CountAndLastTupleTestSink minSink = new CountAndLastTupleTestSink();
-		oper.min.setSink(minSink);
+  /**
+   * Test operator logic emits correct results for each schema.
+   *
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testSchemaNodeProcessing(MinKeyVal oper, String type)
+  {
+    CountAndLastTupleTestSink minSink = new CountAndLastTupleTestSink();
+    oper.min.setSink(minSink);
 
-		oper.beginWindow(0);
+    oper.beginWindow(0);
 
-		int numtuples = 10000;
-		if (type.equals("integer")) {
-			for (int i = numtuples; i > 0; i--) {
-				oper.data.process(new KeyValPair("a", new Integer(i)));
-			}
-		} else if (type.equals("double")) {
-			for (int i = numtuples; i > 0; i--) {
-				oper.data.process(new KeyValPair("a", new Double(i)));
-			}
-		} else if (type.equals("long")) {
-			for (int i = numtuples; i > 0; i--) {
-				oper.data.process(new KeyValPair("a", new Long(i)));
-			}
-		} else if (type.equals("short")) {
-			for (short j = 1000; j > 0; j--) { // cannot cross 64K
-				oper.data.process(new KeyValPair("a", new Short(j)));
-			}
-		} else if (type.equals("float")) {
-			for (int i = numtuples; i > 0; i--) {
-				oper.data.process(new KeyValPair("a", new Float(i)));
-			}
-		}
+    int numtuples = 10000;
+    if (type.equals("integer")) {
+      for (int i = numtuples; i > 0; i--) {
+        oper.data.process(new KeyValPair("a", new Integer(i)));
+      }
+    } else if (type.equals("double")) {
+      for (int i = numtuples; i > 0; i--) {
+        oper.data.process(new KeyValPair("a", (double)i));
+      }
+    } else if (type.equals("long")) {
+      for (int i = numtuples; i > 0; i--) {
+        oper.data.process(new KeyValPair("a", (long)i));
+      }
+    } else if (type.equals("short")) {
+      for (short j = 1000; j > 0; j--) { // cannot cross 64K
+        oper.data.process(new KeyValPair("a", j));
+      }
+    } else if (type.equals("float")) {
+      for (int i = numtuples; i > 0; i--) {
+        oper.data.process(new KeyValPair("a", (float)i));
+      }
+    }
 
-		oper.endWindow();
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", 1, minSink.count);
-		Number val = ((KeyValPair<String, Number>) minSink.tuple).getValue()
-				.intValue();
-		if (type.equals("short")) {
-			Assert.assertEquals("emitted min value was ", 1, val);
-		} else {
-			Assert.assertEquals("emitted min value was ", 1, val);
-		}
-	}
+    Assert.assertEquals("number emitted tuples", 1, minSink.count);
+    Number val = ((KeyValPair<String, Number>)minSink.tuple).getValue().intValue();
+    if (type.equals("short")) {
+      Assert.assertEquals("emitted min value was ", 1, val);
+    } else {
+      Assert.assertEquals("emitted min value was ", 1, val);
+    }
+  }
 
-	/**
-	 * Used to test partitioning.
-	 */
-	public static class TestInputOperator extends BaseOperator implements
-			InputOperator
-	{
-		public final transient DefaultOutputPort<KeyValPair<String, Integer>> output = new DefaultOutputPort<KeyValPair<String, Integer>>();
-		private transient boolean first = true;
+  /**
+   * Used to test partitioning.
+   */
+  public static class TestInputOperator extends BaseOperator implements
+      InputOperator
+  {
+    public final transient DefaultOutputPort<KeyValPair<String, Integer>> output = new DefaultOutputPort<KeyValPair<String, Integer>>();
+    private transient boolean first = true;
 
-		@SuppressWarnings({ "unchecked", "rawtypes" })
-		@Override
-		public void emitTuples()
-		{
-			if (first) {
-				for (int i = 40; i < 100; i++) {
-					output.emit(new KeyValPair("a", new Integer(i)));
-				}
-				for (int i = 50; i < 100; i++) {
-					output.emit(new KeyValPair("b", new Integer(i)));
-				}
-				for (int i = 60; i < 100; i++) {
-					output.emit(new KeyValPair("c", new Integer(i)));
-				}
-				first = false;
-			}
-		}
-	}
+    @SuppressWarnings({ "unchecked", "rawtypes" })
+    @Override
+    public void emitTuples()
+    {
+      if (first) {
+        for (int i = 40; i < 100; i++) {
+          output.emit(new KeyValPair("a", i));
+        }
+        for (int i = 50; i < 100; i++) {
+          output.emit(new KeyValPair("b", i));
+        }
+        for (int i = 60; i < 100; i++) {
+          output.emit(new KeyValPair("c", i));
+        }
+        first = false;
+      }
+    }
+  }
 
-	public static class CollectorOperator extends BaseOperator
-	{
-		public static final ArrayList<KeyValPair<String, Integer>> buffer = new ArrayList<KeyValPair<String, Integer>>();
-		public final transient DefaultInputPort<KeyValPair<String, Integer>> input = new DefaultInputPort<KeyValPair<String, Integer>>()
-		{
-			@SuppressWarnings({ "unchecked", "rawtypes" })
-			@Override
-			public void process(KeyValPair<String, Integer> tuple)
-			{
-				buffer.add(new KeyValPair(tuple.getKey(), tuple.getValue()));
-			}
-		};
-	}
+  public static class CollectorOperator extends BaseOperator
+  {
+    public static final ArrayList<KeyValPair<String, Integer>> buffer = new ArrayList<KeyValPair<String, Integer>>();
+    public final transient DefaultInputPort<KeyValPair<String, Integer>> input = new DefaultInputPort<KeyValPair<String, Integer>>()
+    {
+      @SuppressWarnings({ "unchecked", "rawtypes" })
+      @Override
+      public void process(KeyValPair<String, Integer> tuple)
+      {
+        buffer.add(new KeyValPair(tuple.getKey(), tuple.getValue()));
+      }
+    };
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MinTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MinTest.java b/library/src/test/java/com/datatorrent/lib/math/MinTest.java
index 18aa8cd..dfffae3 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MinTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MinTest.java
@@ -34,7 +34,7 @@ public class MinTest
    * Test oper logic emits correct results
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @Test
   public void testNodeSchemaProcessing()
   {
     Min<Double> oper = new Min<Double>();
@@ -43,9 +43,9 @@ public class MinTest
 
     oper.beginWindow(0); //
 
-    Double a = new Double(2.0);
-    Double b = new Double(20.0);
-    Double c = new Double(1000.0);
+    Double a = 2.0;
+    Double b = 20.0;
+    Double c = 1000.0;
 
     oper.data.process(a);
     oper.data.process(b);
@@ -74,6 +74,6 @@ public class MinTest
     oper.endWindow(); //
 
     Assert.assertEquals("number emitted tuples", 1, minSink.count);
-    Assert.assertEquals("emitted high value was ", new Double(1.0), minSink.tuple);
+    Assert.assertEquals("emitted high value was ", 1.0, minSink.tuple);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/MultiplyByConstantTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MultiplyByConstantTest.java b/library/src/test/java/com/datatorrent/lib/math/MultiplyByConstantTest.java
index 0f5283b..68e89eb 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MultiplyByConstantTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MultiplyByConstantTest.java
@@ -28,7 +28,7 @@ import com.datatorrent.lib.testbench.SumTestSink;
  */
 public class MultiplyByConstantTest
 {
-	/**
+  /**
    * Test oper logic emits correct results
    */
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/QuotientMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/QuotientMapTest.java b/library/src/test/java/com/datatorrent/lib/math/QuotientMapTest.java
index 7ff2e66..92c0e77 100644
--- a/library/src/test/java/com/datatorrent/lib/math/QuotientMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/QuotientMapTest.java
@@ -33,60 +33,60 @@ import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
  */
 public class QuotientMapTest
 {
-	private static Logger LOG = LoggerFactory.getLogger(QuotientMap.class);
+  private static Logger LOG = LoggerFactory.getLogger(QuotientMap.class);
 
-	/**
-	 * Test node logic emits correct results
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new QuotientMap<String, Integer>());
-		testNodeProcessingSchema(new QuotientMap<String, Double>());
-	}
+  /**
+   * Test node logic emits correct results
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new QuotientMap<String, Integer>());
+    testNodeProcessingSchema(new QuotientMap<String, Double>());
+  }
 
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testNodeProcessingSchema(QuotientMap oper) throws Exception
-	{
-		CountAndLastTupleTestSink quotientSink = new CountAndLastTupleTestSink();
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testNodeProcessingSchema(QuotientMap oper) throws Exception
+  {
+    CountAndLastTupleTestSink quotientSink = new CountAndLastTupleTestSink();
 
-		oper.quotient.setSink(quotientSink);
-		oper.setMult_by(2);
+    oper.quotient.setSink(quotientSink);
+    oper.setMult_by(2);
 
-		oper.beginWindow(0); //
-		HashMap<String, Number> input = new HashMap<String, Number>();
-		int numtuples = 100;
-		for (int i = 0; i < numtuples; i++) {
-			input.clear();
-			input.put("a", 2);
-			input.put("b", 20);
-			input.put("c", 1000);
-			oper.numerator.process(input);
-			input.clear();
-			input.put("a", 2);
-			input.put("b", 40);
-			input.put("c", 500);
-			oper.denominator.process(input);
-		}
+    oper.beginWindow(0); //
+    HashMap<String, Number> input = new HashMap<String, Number>();
+    int numtuples = 100;
+    for (int i = 0; i < numtuples; i++) {
+      input.clear();
+      input.put("a", 2);
+      input.put("b", 20);
+      input.put("c", 1000);
+      oper.numerator.process(input);
+      input.clear();
+      input.put("a", 2);
+      input.put("b", 40);
+      input.put("c", 500);
+      oper.denominator.process(input);
+    }
 
-		oper.endWindow();
+    oper.endWindow();
 
-		// One for each key
-		Assert.assertEquals("number emitted tuples", 1, quotientSink.count);
-		HashMap<String, Number> output = (HashMap<String, Number>) quotientSink.tuple;
-		for (Map.Entry<String, Number> e : output.entrySet()) {
-			if (e.getKey().equals("a")) {
-				Assert.assertEquals("emitted value for 'a' was ", new Double(2),
-						e.getValue());
-			} else if (e.getKey().equals("b")) {
-				Assert.assertEquals("emitted tuple for 'b' was ", new Double(1),
-						e.getValue());
-			} else if (e.getKey().equals("c")) {
-				Assert.assertEquals("emitted tuple for 'c' was ", new Double(4),
-						e.getValue());
-			} else {
-				LOG.debug(String.format("key was %s", e.getKey()));
-			}
-		}
-	}
+    // One for each key
+    Assert.assertEquals("number emitted tuples", 1, quotientSink.count);
+    HashMap<String, Number> output = (HashMap<String, Number>)quotientSink.tuple;
+    for (Map.Entry<String, Number> e : output.entrySet()) {
+      if (e.getKey().equals("a")) {
+        Assert.assertEquals("emitted value for 'a' was ", 2d,
+            e.getValue());
+      } else if (e.getKey().equals("b")) {
+        Assert.assertEquals("emitted tuple for 'b' was ", 1d,
+            e.getValue());
+      } else if (e.getKey().equals("c")) {
+        Assert.assertEquals("emitted tuple for 'c' was ", 4d,
+            e.getValue());
+      } else {
+        LOG.debug(String.format("key was %s", e.getKey()));
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/QuotientTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/QuotientTest.java b/library/src/test/java/com/datatorrent/lib/math/QuotientTest.java
index 9b71427..604e45f 100644
--- a/library/src/test/java/com/datatorrent/lib/math/QuotientTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/QuotientTest.java
@@ -31,71 +31,72 @@ import com.datatorrent.api.Sink;
  */
 public class QuotientTest
 {
-	class TestSink implements Sink<Object>
-	{
-		List<Object> collectedTuples = new ArrayList<Object>();
 
-		@Override
-		public void put(Object payload)
-		{
-          collectedTuples.add(payload);
-		}
+  class TestSink implements Sink<Object>
+  {
+    List<Object> collectedTuples = new ArrayList<Object>();
 
-		@Override
-		public int getCount(boolean reset)
-		{
-			throw new UnsupportedOperationException("Not supported yet.");
-		}
-	}
+    @Override
+    public void put(Object payload)
+    {
+      collectedTuples.add(payload);
+    }
 
-	/**
-	 * Test oper logic emits correct results.
-	 */
-	@Test
-	public void testNodeSchemaProcessing()
-	{
-		Quotient<Double> oper = new Quotient<Double>();
-		TestSink quotientSink = new TestSink();
-		oper.quotient.setSink(quotientSink);
+    @Override
+    public int getCount(boolean reset)
+    {
+      throw new UnsupportedOperationException("Not supported yet.");
+    }
+  }
 
-		oper.setMult_by(2);
+  /**
+   * Test oper logic emits correct results.
+   */
+  @Test
+  public void testNodeSchemaProcessing()
+  {
+    Quotient<Double> oper = new Quotient<Double>();
+    TestSink quotientSink = new TestSink();
+    oper.quotient.setSink(quotientSink);
 
-		oper.beginWindow(0); //
-		Double a = new Double(30.0);
-		Double b = new Double(20.0);
-		Double c = new Double(100.0);
-		oper.denominator.process(a);
-		oper.denominator.process(b);
-		oper.denominator.process(c);
+    oper.setMult_by(2);
 
-		a = 5.0;
-		oper.numerator.process(a);
-		a = 1.0;
-		oper.numerator.process(a);
-		b = 44.0;
-		oper.numerator.process(b);
+    oper.beginWindow(0); //
+    Double a = 30.0;
+    Double b = 20.0;
+    Double c = 100.0;
+    oper.denominator.process(a);
+    oper.denominator.process(b);
+    oper.denominator.process(c);
 
-		b = 10.0;
-		oper.numerator.process(b);
-		c = 22.0;
-		oper.numerator.process(c);
-		c = 18.0;
-		oper.numerator.process(c);
+    a = 5.0;
+    oper.numerator.process(a);
+    a = 1.0;
+    oper.numerator.process(a);
+    b = 44.0;
+    oper.numerator.process(b);
 
-		a = 0.5;
-		oper.numerator.process(a);
-		b = 41.5;
-		oper.numerator.process(b);
-		a = 8.0;
-		oper.numerator.process(a);
-		oper.endWindow(); //
+    b = 10.0;
+    oper.numerator.process(b);
+    c = 22.0;
+    oper.numerator.process(c);
+    c = 18.0;
+    oper.numerator.process(c);
 
-		// payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
-		Assert.assertEquals("number emitted tuples", 1,
-				quotientSink.collectedTuples.size());
-		for (Object o : quotientSink.collectedTuples) { // sum is 1157
-			Double val = (Double) o;
-			Assert.assertEquals("emitted quotient value was ", new Double(2.0), val);
-		}
-	}
+    a = 0.5;
+    oper.numerator.process(a);
+    b = 41.5;
+    oper.numerator.process(b);
+    a = 8.0;
+    oper.numerator.process(a);
+    oper.endWindow(); //
+
+    // payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
+    Assert.assertEquals("number emitted tuples", 1,
+        quotientSink.collectedTuples.size());
+    for (Object o : quotientSink.collectedTuples) { // sum is 1157
+      Double val = (Double)o;
+      Assert.assertEquals("emitted quotient value was ", new Double(2.0), val);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/RangeKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/RangeKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/RangeKeyValTest.java
index a6d8bc8..57727b4 100644
--- a/library/src/test/java/com/datatorrent/lib/math/RangeKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/RangeKeyValTest.java
@@ -60,33 +60,29 @@ public class RangeKeyValTest<V extends Number>
     int numtuples = 1000;
     if (type.equals("integer")) {
       for (int i = -10; i < numtuples; i++) {
-        node.data.process(new KeyValPair<String, Integer>("a", new Integer(i)));
+        node.data.process(new KeyValPair<String, Integer>("a", i));
       }
-    }
-    else if (type.equals("double")) {
+    } else if (type.equals("double")) {
       for (int i = -10; i < numtuples; i++) {
-        node.data.process(new KeyValPair<String, Double>("a", new Double(i)));
+        node.data.process(new KeyValPair<String, Double>("a", (double)i));
       }
-    }
-    else if (type.equals("long")) {
+    } else if (type.equals("long")) {
       for (int i = -10; i < numtuples; i++) {
-        node.data.process(new KeyValPair<String, Long>("a", new Long(i)));
+        node.data.process(new KeyValPair<String, Long>("a", (long)i));
       }
-    }
-    else if (type.equals("short")) {
+    } else if (type.equals("short")) {
       for (short i = -10; i < numtuples; i++) {
-        node.data.process(new KeyValPair<String, Short>("a", new Short(i)));
+        node.data.process(new KeyValPair<String, Short>("a", i));
       }
-    }
-    else if (type.equals("float")) {
+    } else if (type.equals("float")) {
       for (int i = -10; i < numtuples; i++) {
-        node.data.process(new KeyValPair<String, Float>("a", new Float(i)));
+        node.data.process(new KeyValPair<String, Float>("a", (float)i));
       }
     }
 
     node.endWindow();
-    Assert.assertEquals("high was ", new Double(999.0), rangeSink.high, 0);
-    Assert.assertEquals("low was ", new Double(-10.0), rangeSink.low, 0);
+    Assert.assertEquals("high was ", 999.0, rangeSink.high, 0);
+    Assert.assertEquals("low was ", -10.0, rangeSink.low, 0);
     log.debug(String.format("\nTested %d tuples", numtuples));
   }
 
@@ -100,8 +96,8 @@ public class RangeKeyValTest<V extends Number>
     @Override
     public void put(Object payload)
     {
-      KeyValPair<String, Object> tuple = (KeyValPair<String, Object>) payload;
-      HighLow<V> hl = (HighLow<V>) tuple.getValue();
+      KeyValPair<String, Object> tuple = (KeyValPair<String, Object>)payload;
+      HighLow<V> hl = (HighLow<V>)tuple.getValue();
       high = hl.getHigh().doubleValue();
       low = hl.getLow().doubleValue();
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/RangeTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/RangeTest.java b/library/src/test/java/com/datatorrent/lib/math/RangeTest.java
index 931486f..c708c2a 100644
--- a/library/src/test/java/com/datatorrent/lib/math/RangeTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/RangeTest.java
@@ -33,80 +33,80 @@ import com.datatorrent.lib.util.HighLow;
  */
 public class RangeTest<V extends Number>
 {
-	@SuppressWarnings("rawtypes")
-	class TestSink implements Sink
-	{
-		List<Object> collectedTuples = new ArrayList<Object>();
-
-		@Override
-		public void put(Object payload)
-		{
-          collectedTuples.add(payload);
-		}
-
-		@Override
-		public int getCount(boolean reset)
-		{
-			throw new UnsupportedOperationException("Not supported yet.");
-		}
-	}
-
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings("unchecked")
-	@Test
-	public void testNodeSchemaProcessing()
-	{
-		Range<Double> oper = new Range<Double>();
-		TestSink rangeSink = new TestSink();
-		oper.range.setSink(rangeSink);
-
-		oper.beginWindow(0); //
-
-		int numTuples = 1000;
-		for (int i = 0; i < numTuples; i++) {
-			Double a = new Double(20.0);
-			Double b = new Double(2.0);
-			Double c = new Double(1000.0);
-
-			oper.data.process(a);
-			oper.data.process(b);
-			oper.data.process(c);
-
-			a = 1.0;
-			oper.data.process(a);
-			a = 10.0;
-			oper.data.process(a);
-			b = 5.0;
-			oper.data.process(b);
-
-			b = 12.0;
-			oper.data.process(b);
-			c = 22.0;
-			oper.data.process(c);
-			c = 14.0;
-			oper.data.process(c);
-
-			a = 46.0;
-			oper.data.process(a);
-			b = 2.0;
-			oper.data.process(b);
-			a = 23.0;
-			oper.data.process(a);
-		}
-
-		oper.endWindow(); //
-
-		// payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
-		Assert.assertEquals("number emitted tuples", 1,
-				rangeSink.collectedTuples.size());
-		for (Object o : rangeSink.collectedTuples) {
-			HighLow<V> hl = (HighLow<V>) o;
-			Assert.assertEquals("emitted high value was ", new Double(1000.0),
-					hl.getHigh());
-			Assert.assertEquals("emitted low value was ", new Double(1.0),
-					hl.getLow());
-		}
-	}
+
+  @SuppressWarnings("rawtypes")
+  class TestSink implements Sink
+  {
+    List<Object> collectedTuples = new ArrayList<Object>();
+
+    @Override
+    public void put(Object payload)
+    {
+      collectedTuples.add(payload);
+    }
+
+    @Override
+    public int getCount(boolean reset)
+    {
+      throw new UnsupportedOperationException("Not supported yet.");
+    }
+  }
+
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void testNodeSchemaProcessing()
+  {
+    Range<Double> oper = new Range<Double>();
+    TestSink rangeSink = new TestSink();
+    oper.range.setSink(rangeSink);
+
+    oper.beginWindow(0); //
+
+    int numTuples = 1000;
+    for (int i = 0; i < numTuples; i++) {
+      Double a = new Double(20.0);
+      Double b = new Double(2.0);
+      Double c = new Double(1000.0);
+
+      oper.data.process(a);
+      oper.data.process(b);
+      oper.data.process(c);
+
+      a = 1.0;
+      oper.data.process(a);
+      a = 10.0;
+      oper.data.process(a);
+      b = 5.0;
+      oper.data.process(b);
+
+      b = 12.0;
+      oper.data.process(b);
+      c = 22.0;
+      oper.data.process(c);
+      c = 14.0;
+      oper.data.process(c);
+
+      a = 46.0;
+      oper.data.process(a);
+      b = 2.0;
+      oper.data.process(b);
+      a = 23.0;
+      oper.data.process(a);
+    }
+
+    oper.endWindow(); //
+
+    // payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
+    Assert.assertEquals("number emitted tuples", 1, rangeSink.collectedTuples.size());
+    for (Object o : rangeSink.collectedTuples) {
+      HighLow<V> hl = (HighLow<V>)o;
+      Assert.assertEquals("emitted high value was ", 1000.0,
+          hl.getHigh());
+      Assert.assertEquals("emitted low value was ", 1.0,
+          hl.getLow());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/RunningAverageTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/RunningAverageTest.java b/library/src/test/java/com/datatorrent/lib/math/RunningAverageTest.java
index d84ecf3..52da631 100644
--- a/library/src/test/java/com/datatorrent/lib/math/RunningAverageTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/RunningAverageTest.java
@@ -18,50 +18,50 @@
  */
 package com.datatorrent.lib.math;
 
-import static org.junit.Assert.assertEquals;
-
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.junit.Assert.assertEquals;
+
 /**
  * Functional tests for {@link com.datatorrent.lib.math.RunningAverage}
  */
 public class RunningAverageTest
 {
-	public RunningAverageTest()
-	{
-	}
+  public RunningAverageTest()
+  {
+  }
 
-	@Test
-	public void testLogicForSmallValues()
-	{
-		logger.debug("small values");
-		RunningAverage instance = new RunningAverage();
-		instance.input.process(1.0);
+  @Test
+  public void testLogicForSmallValues()
+  {
+    logger.debug("small values");
+    RunningAverage instance = new RunningAverage();
+    instance.input.process(1.0);
 
-		assertEquals("first average", 1.0, instance.average, 0.00001);
-		assertEquals("first count", 1, instance.count);
+    assertEquals("first average", 1.0, instance.average, 0.00001);
+    assertEquals("first count", 1, instance.count);
 
-		instance.input.process(2.0);
+    instance.input.process(2.0);
 
-		assertEquals("second average", 1.5, instance.average, 0.00001);
-		assertEquals("second count", 2, instance.count);
-	}
+    assertEquals("second average", 1.5, instance.average, 0.00001);
+    assertEquals("second count", 2, instance.count);
+  }
 
-	@Test
-	public void testLogicForLargeValues()
-	{
-		logger.debug("large values");
-		RunningAverage instance = new RunningAverage();
-		instance.input.process(Long.MAX_VALUE);
+  @Test
+  public void testLogicForLargeValues()
+  {
+    logger.debug("large values");
+    RunningAverage instance = new RunningAverage();
+    instance.input.process(Long.MAX_VALUE);
 
-		assertEquals("first average", Long.MAX_VALUE, (long) instance.average);
+    assertEquals("first average", Long.MAX_VALUE, (long)instance.average);
 
-		instance.input.process(Long.MAX_VALUE);
-		assertEquals("second average", Long.MAX_VALUE, (long) instance.average);
-	}
+    instance.input.process(Long.MAX_VALUE);
+    assertEquals("second average", Long.MAX_VALUE, (long)instance.average);
+  }
 
-	private static final Logger logger = LoggerFactory
-			.getLogger(RunningAverageTest.class);
+  private static final Logger logger = LoggerFactory
+      .getLogger(RunningAverageTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java b/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
index 671826a..e968dba 100644
--- a/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
@@ -33,42 +33,42 @@ import com.datatorrent.lib.testbench.SumTestSink;
  */
 public class SigmaTest
 {
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeSchemaProcessing()
-	{
-		Sigma oper = new Sigma();
-		SumTestSink lmultSink = new SumTestSink();
-		SumTestSink imultSink = new SumTestSink();
-		SumTestSink dmultSink = new SumTestSink();
-		SumTestSink fmultSink = new SumTestSink();
-		oper.longResult.setSink(lmultSink);
-		oper.integerResult.setSink(imultSink);
-		oper.doubleResult.setSink(dmultSink);
-		oper.floatResult.setSink(fmultSink);
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeSchemaProcessing()
+  {
+    Sigma oper = new Sigma();
+    SumTestSink lmultSink = new SumTestSink();
+    SumTestSink imultSink = new SumTestSink();
+    SumTestSink dmultSink = new SumTestSink();
+    SumTestSink fmultSink = new SumTestSink();
+    oper.longResult.setSink(lmultSink);
+    oper.integerResult.setSink(imultSink);
+    oper.doubleResult.setSink(dmultSink);
+    oper.floatResult.setSink(fmultSink);
 
-		int sum = 0;
-		ArrayList<Integer> list = new ArrayList<Integer>();
-		for (int i = 0; i < 100; i++) {
-			list.add(i);
-			sum += i;
-		}
+    int sum = 0;
+    ArrayList<Integer> list = new ArrayList<Integer>();
+    for (int i = 0; i < 100; i++) {
+      list.add(i);
+      sum += i;
+    }
 
-		oper.beginWindow(0); //
-		oper.input.process(list);
-		oper.endWindow(); //
+    oper.beginWindow(0); //
+    oper.input.process(list);
+    oper.endWindow(); //
 
-		oper.beginWindow(1); //
-		oper.input.process(list);
-		oper.endWindow(); //
-		sum = sum * 2;
+    oper.beginWindow(1); //
+    oper.input.process(list);
+    oper.endWindow(); //
+    sum = sum * 2;
 
-		Assert.assertEquals("sum was", sum, lmultSink.val.intValue());
-		Assert.assertEquals("sum was", sum, imultSink.val.intValue());
-		Assert.assertEquals("sum was", sum, dmultSink.val.intValue());
-		Assert.assertEquals("sum", sum, fmultSink.val.intValue());
-	}
+    Assert.assertEquals("sum was", sum, lmultSink.val.intValue());
+    Assert.assertEquals("sum was", sum, imultSink.val.intValue());
+    Assert.assertEquals("sum was", sum, dmultSink.val.intValue());
+    Assert.assertEquals("sum", sum, fmultSink.val.intValue());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/SquareCalculusTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/SquareCalculusTest.java b/library/src/test/java/com/datatorrent/lib/math/SquareCalculusTest.java
index 27f7464..7f5ab53 100644
--- a/library/src/test/java/com/datatorrent/lib/math/SquareCalculusTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/SquareCalculusTest.java
@@ -28,35 +28,35 @@ import com.datatorrent.lib.testbench.SumTestSink;
  */
 public class SquareCalculusTest
 {
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeSchemaProcessing()
-	{
-		SquareCalculus oper = new SquareCalculus();
-		SumTestSink lmultSink = new SumTestSink();
-		SumTestSink imultSink = new SumTestSink();
-		SumTestSink dmultSink = new SumTestSink();
-		SumTestSink fmultSink = new SumTestSink();
-		oper.longResult.setSink(lmultSink);
-		oper.integerResult.setSink(imultSink);
-		oper.doubleResult.setSink(dmultSink);
-		oper.floatResult.setSink(fmultSink);
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testNodeSchemaProcessing()
+  {
+    SquareCalculus oper = new SquareCalculus();
+    SumTestSink lmultSink = new SumTestSink();
+    SumTestSink imultSink = new SumTestSink();
+    SumTestSink dmultSink = new SumTestSink();
+    SumTestSink fmultSink = new SumTestSink();
+    oper.longResult.setSink(lmultSink);
+    oper.integerResult.setSink(imultSink);
+    oper.doubleResult.setSink(dmultSink);
+    oper.floatResult.setSink(fmultSink);
 
-		oper.beginWindow(0); //
-		int sum = 0;
-		for (int i = 0; i < 50; i++) {
-			Integer t = i;
-			oper.input.process(t);
-			sum += i * i;
-		}
-		oper.endWindow(); //
+    oper.beginWindow(0); //
+    int sum = 0;
+    for (int i = 0; i < 50; i++) {
+      Integer t = i;
+      oper.input.process(t);
+      sum += i * i;
+    }
+    oper.endWindow(); //
 
-		Assert.assertEquals("sum was", sum, lmultSink.val.intValue());
-		Assert.assertEquals("sum was", sum, imultSink.val.intValue());
-		Assert.assertEquals("sum was", sum, dmultSink.val.intValue());
-		Assert.assertEquals("sum", sum, fmultSink.val.intValue());
-	}
+    Assert.assertEquals("sum was", sum, lmultSink.val.intValue());
+    Assert.assertEquals("sum was", sum, imultSink.val.intValue());
+    Assert.assertEquals("sum was", sum, dmultSink.val.intValue());
+    Assert.assertEquals("sum", sum, fmultSink.val.intValue());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/SumCountMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/SumCountMapTest.java b/library/src/test/java/com/datatorrent/lib/math/SumCountMapTest.java
index 5cca950..b0c7b38 100644
--- a/library/src/test/java/com/datatorrent/lib/math/SumCountMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/SumCountMapTest.java
@@ -31,125 +31,124 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SumCountMapTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing()
-	{
-		testNodeSchemaProcessing(true, true);
-		testNodeSchemaProcessing(true, false);
-		testNodeSchemaProcessing(false, true);
-		testNodeSchemaProcessing(false, false);
-	}
+  /**
+   * Test operator logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing()
+  {
+    testNodeSchemaProcessing(true, true);
+    testNodeSchemaProcessing(true, false);
+    testNodeSchemaProcessing(false, true);
+    testNodeSchemaProcessing(false, false);
+  }
 
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testNodeSchemaProcessing(boolean sum, boolean count)
-	{
-		SumCountMap<String, Double> oper = new SumCountMap<String, Double>();
-		oper.setType(Double.class);
-		CollectorTestSink sumSink = new CollectorTestSink();
-		CollectorTestSink countSink = new CollectorTestSink();
-		if (sum) {
-			oper.sum.setSink(sumSink);
-		}
-		if (count) {
-			oper.count.setSink(countSink);
-		}
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testNodeSchemaProcessing(boolean sum, boolean count)
+  {
+    SumCountMap<String, Double> oper = new SumCountMap<String, Double>();
+    oper.setType(Double.class);
+    CollectorTestSink sumSink = new CollectorTestSink();
+    CollectorTestSink countSink = new CollectorTestSink();
+    if (sum) {
+      oper.sum.setSink(sumSink);
+    }
+    if (count) {
+      oper.count.setSink(countSink);
+    }
 
-		oper.beginWindow(0); //
+    oper.beginWindow(0); //
 
-		HashMap<String, Double> input = new HashMap<String, Double>();
+    HashMap<String, Double> input = new HashMap<String, Double>();
 
-		input.put("a", 2.0);
-		input.put("b", 20.0);
-		input.put("c", 1000.0);
-		oper.data.process(input);
-		input.clear();
-		input.put("a", 1.0);
-		oper.data.process(input);
-		input.clear();
-		input.put("a", 10.0);
-		input.put("b", 5.0);
-		oper.data.process(input);
-		input.clear();
-		input.put("d", 55.0);
-		input.put("b", 12.0);
-		oper.data.process(input);
-		input.clear();
-		input.put("d", 22.0);
-		oper.data.process(input);
-		input.clear();
-		input.put("d", 14.2);
-		oper.data.process(input);
-		input.clear();
+    input.put("a", 2.0);
+    input.put("b", 20.0);
+    input.put("c", 1000.0);
+    oper.data.process(input);
+    input.clear();
+    input.put("a", 1.0);
+    oper.data.process(input);
+    input.clear();
+    input.put("a", 10.0);
+    input.put("b", 5.0);
+    oper.data.process(input);
+    input.clear();
+    input.put("d", 55.0);
+    input.put("b", 12.0);
+    oper.data.process(input);
+    input.clear();
+    input.put("d", 22.0);
+    oper.data.process(input);
+    input.clear();
+    input.put("d", 14.2);
+    oper.data.process(input);
+    input.clear();
 
-		// Mix integers and doubles
-		HashMap<String, Double> inputi = new HashMap<String, Double>();
-		inputi.put("d", 46.0);
-		inputi.put("e", 2.0);
-		oper.data.process(inputi);
-		inputi.clear();
-		inputi.put("a", 23.0);
-		inputi.put("d", 4.0);
-		oper.data.process(inputi);
-		inputi.clear();
+    // Mix integers and doubles
+    HashMap<String, Double> inputi = new HashMap<String, Double>();
+    inputi.put("d", 46.0);
+    inputi.put("e", 2.0);
+    oper.data.process(inputi);
+    inputi.clear();
+    inputi.put("a", 23.0);
+    inputi.put("d", 4.0);
+    oper.data.process(inputi);
+    inputi.clear();
 
-		oper.endWindow(); //
+    oper.endWindow(); //
 
-		if (sum) {
-			// payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
-			Assert.assertEquals("number emitted tuples", 1,
-					sumSink.collectedTuples.size());
-			for (Object o : sumSink.collectedTuples) {
-				HashMap<String, Object> output = (HashMap<String, Object>) o;
-				for (Map.Entry<String, Object> e : output.entrySet()) {
-					Double val = (Double) e.getValue();
-					if (e.getKey().equals("a")) {
-						Assert.assertEquals("emitted value for 'a' was ", new Double(36),
-								val);
-					} else if (e.getKey().equals("b")) {
-						Assert.assertEquals("emitted tuple for 'b' was ", new Double(37),
-								val);
-					} else if (e.getKey().equals("c")) {
-						Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000),
-								val);
-					} else if (e.getKey().equals("d")) {
-						Assert.assertEquals("emitted tuple for 'd' was ",
-								new Double(141.2), val);
-					} else if (e.getKey().equals("e")) {
-						Assert.assertEquals("emitted tuple for 'e' was ", new Double(2),
-								val);
-					}
-				}
-			}
-		}
-		if (count) {
-			// payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
-			Assert.assertEquals("number emitted tuples", 1,
-					countSink.collectedTuples.size());
-			for (Object o : countSink.collectedTuples) {
-				HashMap<String, Object> output = (HashMap<String, Object>) o;
-				for (Map.Entry<String, Object> e : output.entrySet()) {
-					Integer val = (Integer) e.getValue();
-					if (e.getKey().equals("a")) {
-						Assert
-								.assertEquals("emitted value for 'a' was ", 4, val.intValue());
-					} else if (e.getKey().equals("b")) {
-						Assert
-								.assertEquals("emitted tuple for 'b' was ", 3, val.intValue());
-					} else if (e.getKey().equals("c")) {
-						Assert
-								.assertEquals("emitted tuple for 'c' was ", 1, val.intValue());
-					} else if (e.getKey().equals("d")) {
-						Assert
-								.assertEquals("emitted tuple for 'd' was ", 5, val.intValue());
-					} else if (e.getKey().equals("e")) {
-						Assert
-								.assertEquals("emitted tuple for 'e' was ", 1, val.intValue());
-					}
-				}
-			}
-		}
-	}
+    if (sum) {
+      // payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
+      Assert.assertEquals("number emitted tuples", 1, sumSink.collectedTuples.size());
+
+      for (Object o : sumSink.collectedTuples) {
+        HashMap<String, Object> output = (HashMap<String, Object>)o;
+        for (Map.Entry<String, Object> e : output.entrySet()) {
+          Double val = (Double)e.getValue();
+          if (e.getKey().equals("a")) {
+            Assert.assertEquals("emitted value for 'a' was ", new Double(36),
+                val);
+          } else if (e.getKey().equals("b")) {
+            Assert.assertEquals("emitted tuple for 'b' was ", new Double(37),
+                val);
+          } else if (e.getKey().equals("c")) {
+            Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000),
+                val);
+          } else if (e.getKey().equals("d")) {
+            Assert.assertEquals("emitted tuple for 'd' was ",
+                new Double(141.2), val);
+          } else if (e.getKey().equals("e")) {
+            Assert.assertEquals("emitted tuple for 'e' was ", new Double(2),
+                val);
+          }
+        }
+      }
+    }
+    if (count) {
+      // payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
+      Assert.assertEquals("number emitted tuples", 1, countSink.collectedTuples.size());
+      for (Object o : countSink.collectedTuples) {
+        HashMap<String, Object> output = (HashMap<String, Object>)o;
+        for (Map.Entry<String, Object> e : output.entrySet()) {
+          Integer val = (Integer)e.getValue();
+          if (e.getKey().equals("a")) {
+            Assert
+                .assertEquals("emitted value for 'a' was ", 4, val.intValue());
+          } else if (e.getKey().equals("b")) {
+            Assert
+                .assertEquals("emitted tuple for 'b' was ", 3, val.intValue());
+          } else if (e.getKey().equals("c")) {
+            Assert
+                .assertEquals("emitted tuple for 'c' was ", 1, val.intValue());
+          } else if (e.getKey().equals("d")) {
+            Assert
+                .assertEquals("emitted tuple for 'd' was ", 5, val.intValue());
+          } else if (e.getKey().equals("e")) {
+            Assert
+                .assertEquals("emitted tuple for 'e' was ", 1, val.intValue());
+          }
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/SumKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/SumKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/SumKeyValTest.java
index 94e4806..336ddcb 100644
--- a/library/src/test/java/com/datatorrent/lib/math/SumKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/SumKeyValTest.java
@@ -33,7 +33,7 @@ public class SumKeyValTest
    * Test operator logic emits correct results.
    */
   @SuppressWarnings({ "unchecked", "rawtypes" })
-	@Test
+  @Test
   public void testNodeProcessing()
   {
     SumKeyVal<String, Double> oper = new SumKeyVal<String, Double>();
@@ -67,17 +67,13 @@ public class SumKeyValTest
       Double val = (Double)e.getValue();
       if (e.getKey().equals("a")) {
         Assert.assertEquals("emitted value for 'a' was ", new Double(36), val);
-      }
-      else if (e.getKey().equals("b")) {
+      } else if (e.getKey().equals("b")) {
         Assert.assertEquals("emitted tuple for 'b' was ", new Double(37), val);
-      }
-      else if (e.getKey().equals("c")) {
+      } else if (e.getKey().equals("c")) {
         Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000), val);
-      }
-      else if (e.getKey().equals("d")) {
+      } else if (e.getKey().equals("d")) {
         Assert.assertEquals("emitted tuple for 'd' was ", new Double(141.2), val);
-      }
-      else if (e.getKey().equals("e")) {
+      } else if (e.getKey().equals("e")) {
         Assert.assertEquals("emitted tuple for 'e' was ", new Double(2), val);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/SumTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/SumTest.java b/library/src/test/java/com/datatorrent/lib/math/SumTest.java
index 447f56a..e9962f0 100644
--- a/library/src/test/java/com/datatorrent/lib/math/SumTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/SumTest.java
@@ -29,76 +29,76 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SumTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@Test
-	public void testNodeTypeProcessing()
-	{
-		Sum<Double> doper = new Sum<Double>();
-		Sum<Float> foper = new Sum<Float>();
-		Sum<Integer> ioper = new Sum<Integer>();
-		Sum<Long> loper = new Sum<Long>();
-		Sum<Short> soper = new Sum<Short>();
-		doper.setType(Double.class);
-		foper.setType(Float.class);
-		ioper.setType(Integer.class);
-		loper.setType(Long.class);
-		soper.setType(Short.class);
+  /**
+   * Test operator logic emits correct results.
+   */
+  @Test
+  public void testNodeTypeProcessing()
+  {
+    Sum<Double> doper = new Sum<Double>();
+    Sum<Float> foper = new Sum<Float>();
+    Sum<Integer> ioper = new Sum<Integer>();
+    Sum<Long> loper = new Sum<Long>();
+    Sum<Short> soper = new Sum<Short>();
+    doper.setType(Double.class);
+    foper.setType(Float.class);
+    ioper.setType(Integer.class);
+    loper.setType(Long.class);
+    soper.setType(Short.class);
 
-		testNodeSchemaProcessing(doper);
-		testNodeSchemaProcessing(foper);
-		testNodeSchemaProcessing(ioper);
-		testNodeSchemaProcessing(loper);
-		testNodeSchemaProcessing(soper);
-	}
+    testNodeSchemaProcessing(doper);
+    testNodeSchemaProcessing(foper);
+    testNodeSchemaProcessing(ioper);
+    testNodeSchemaProcessing(loper);
+    testNodeSchemaProcessing(soper);
+  }
 
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testNodeSchemaProcessing(Sum oper)
-	{
-		CollectorTestSink sumSink = new CollectorTestSink();
-		oper.sum.setSink(sumSink);
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testNodeSchemaProcessing(Sum oper)
+  {
+    CollectorTestSink sumSink = new CollectorTestSink();
+    oper.sum.setSink(sumSink);
 
-		oper.beginWindow(0); //
+    oper.beginWindow(0); //
 
-		Double a = new Double(2.0);
-		Double b = new Double(20.0);
-		Double c = new Double(1000.0);
+    Double a = 2.0;
+    Double b = 20.0;
+    Double c = 1000.0;
 
-		oper.data.process(a);
-		oper.data.process(b);
-		oper.data.process(c);
+    oper.data.process(a);
+    oper.data.process(b);
+    oper.data.process(c);
 
-		a = 1.0;
-		oper.data.process(a);
-		a = 10.0;
-		oper.data.process(a);
-		b = 5.0;
-		oper.data.process(b);
+    a = 1.0;
+    oper.data.process(a);
+    a = 10.0;
+    oper.data.process(a);
+    b = 5.0;
+    oper.data.process(b);
 
-		b = 12.0;
-		oper.data.process(b);
-		c = 22.0;
-		oper.data.process(c);
-		c = 14.0;
-		oper.data.process(c);
+    b = 12.0;
+    oper.data.process(b);
+    c = 22.0;
+    oper.data.process(c);
+    c = 14.0;
+    oper.data.process(c);
 
-		a = 46.0;
-		oper.data.process(a);
-		b = 2.0;
-		oper.data.process(b);
-		a = 23.0;
-		oper.data.process(a);
+    a = 46.0;
+    oper.data.process(a);
+    b = 2.0;
+    oper.data.process(b);
+    a = 23.0;
+    oper.data.process(a);
 
-		oper.endWindow(); //
+    oper.endWindow(); //
 
-		// payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
-		Assert.assertEquals("number emitted tuples", 1,
-				sumSink.collectedTuples.size());
-		for (Object o : sumSink.collectedTuples) { // sum is 1157
-			Double val = ((Number) o).doubleValue();
-			Assert
-					.assertEquals("emitted sum value was was ", new Double(1157.0), val);
-		}
-	}
+    // payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
+    Assert.assertEquals("number emitted tuples", 1,
+        sumSink.collectedTuples.size());
+    for (Object o : sumSink.collectedTuples) { // sum is 1157
+
+      Double val = ((Number)o).doubleValue();
+      Assert.assertEquals("emitted sum value was was ", new Double(1157.0), val);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProductTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProductTest.java b/library/src/test/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProductTest.java
index 50ba311..dae96e4 100644
--- a/library/src/test/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProductTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProductTest.java
@@ -112,7 +112,8 @@ public class XmlKeyValueStringCartesianProductTest
     Assert.assertEquals("Output 4", "g=vg1,k=vk1", collectedTuples.get(7));
   }
 
-  List<String> testOperator(String xml, String config) {
+  List<String> testOperator(String xml, String config)
+  {
     XmlKeyValueStringCartesianProduct operator = new XmlKeyValueStringCartesianProduct();
     operator.setConfig(config);
     operator.setup(null);
@@ -123,7 +124,7 @@ public class XmlKeyValueStringCartesianProductTest
       public void put(Object o)
       {
         if (o instanceof String) {
-          collectedTuples.add((String) o);
+          collectedTuples.add((String)o);
         }
       }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyValTest.java b/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyValTest.java
index e5ffea8..cad0e8c 100644
--- a/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyValTest.java
@@ -31,37 +31,37 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class MultiWindowRangeKeyValTest
 {
-	private static Logger log = LoggerFactory.getLogger(MultiWindowRangeKeyValTest.class);
+  private static Logger log = LoggerFactory.getLogger(MultiWindowRangeKeyValTest.class);
 
-	/**
-	 * Test functional logic
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   * Test functional logic
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-	public void testNodeProcessing() throws InterruptedException
-	{
-		MultiWindowRangeKeyVal<String, Integer> oper = new MultiWindowRangeKeyVal<String, Integer>();
+  public void testNodeProcessing() throws InterruptedException
+  {
+    MultiWindowRangeKeyVal<String, Integer> oper = new MultiWindowRangeKeyVal<String, Integer>();
 
-		CollectorTestSink swinSink = new CollectorTestSink();
-		oper.range.setSink(swinSink);
+    CollectorTestSink swinSink = new CollectorTestSink();
+    oper.range.setSink(swinSink);
 
-		oper.beginWindow(0);
-		KeyValPair<String, Integer> low = new KeyValPair<String, Integer>("a", 3);
-		oper.data.process(low);
-		KeyValPair<String, Integer> high = new KeyValPair<String, Integer>("a", 11);
-		oper.data.process(high);
-		oper.endWindow();
+    oper.beginWindow(0);
+    KeyValPair<String, Integer> low = new KeyValPair<String, Integer>("a", 3);
+    oper.data.process(low);
+    KeyValPair<String, Integer> high = new KeyValPair<String, Integer>("a", 11);
+    oper.data.process(high);
+    oper.endWindow();
 
-		oper.beginWindow(1);
-		low = new KeyValPair<String, Integer>("a", 1);
-		oper.data.process(low);
-		high = new KeyValPair<String, Integer>("a", 9);
-		oper.data.process(high);
-		oper.endWindow();
+    oper.beginWindow(1);
+    low = new KeyValPair<String, Integer>("a", 1);
+    oper.data.process(low);
+    high = new KeyValPair<String, Integer>("a", 9);
+    oper.data.process(high);
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", 1, swinSink.collectedTuples.size());
-		for (Object o : swinSink.collectedTuples) {
-			log.debug(o.toString());
-		}
-	}
+    Assert.assertEquals("number emitted tuples", 1, swinSink.collectedTuples.size());
+    for (Object o : swinSink.collectedTuples) {
+      log.debug(o.toString());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyValTest.java b/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyValTest.java
index 5573faf..0f9ccbd 100644
--- a/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyValTest.java
@@ -32,36 +32,36 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class MultiWindowSumKeyValTest
 {
-	private static Logger log = LoggerFactory.getLogger(MultiWindowSumKeyValTest.class);
-	/**
-	 * Test functional logic
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  private static Logger log = LoggerFactory.getLogger(MultiWindowSumKeyValTest.class);
+  /**
+   * Test functional logic
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-	public void testNodeProcessing() throws InterruptedException
-	{
-		MultiWindowSumKeyVal<String, Integer> oper = new MultiWindowSumKeyVal<String, Integer>();
+  public void testNodeProcessing() throws InterruptedException
+  {
+    MultiWindowSumKeyVal<String, Integer> oper = new MultiWindowSumKeyVal<String, Integer>();
 
-		CollectorTestSink swinSink = new CollectorTestSink();
-		oper.sum.setSink(swinSink);
+    CollectorTestSink swinSink = new CollectorTestSink();
+    oper.sum.setSink(swinSink);
 
-		oper.beginWindow(0);
-		KeyValPair<String, Integer> low = new KeyValPair<String, Integer>("a", 3);
-		oper.data.process(low);
-		KeyValPair<String, Integer> high = new KeyValPair<String, Integer>("a", 11);
-		oper.data.process(high);
-		oper.endWindow();
+    oper.beginWindow(0);
+    KeyValPair<String, Integer> low = new KeyValPair<String, Integer>("a", 3);
+    oper.data.process(low);
+    KeyValPair<String, Integer> high = new KeyValPair<String, Integer>("a", 11);
+    oper.data.process(high);
+    oper.endWindow();
 
-		oper.beginWindow(1);
-		low = new KeyValPair<String, Integer>("a", 1);
-		oper.data.process(low);
-		high = new KeyValPair<String, Integer>("a", 9);
-		oper.data.process(high);
-		oper.endWindow();
+    oper.beginWindow(1);
+    low = new KeyValPair<String, Integer>("a", 1);
+    oper.data.process(low);
+    high = new KeyValPair<String, Integer>("a", 9);
+    oper.data.process(high);
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", 1, swinSink.collectedTuples.size());
-		for (Object o : swinSink.collectedTuples) {
-			log.debug(o.toString());
-		}
-	}
+    Assert.assertEquals("number emitted tuples", 1, swinSink.collectedTuples.size());
+    for (Object o : swinSink.collectedTuples) {
+      log.debug(o.toString());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/multiwindow/SimpleMovingAverageTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/multiwindow/SimpleMovingAverageTest.java b/library/src/test/java/com/datatorrent/lib/multiwindow/SimpleMovingAverageTest.java
index 75be4bb..0d6a006 100644
--- a/library/src/test/java/com/datatorrent/lib/multiwindow/SimpleMovingAverageTest.java
+++ b/library/src/test/java/com/datatorrent/lib/multiwindow/SimpleMovingAverageTest.java
@@ -54,11 +54,10 @@ public class SimpleMovingAverageTest
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 2, sink.collectedTuples.size());
     for (int i = 0; i < 2; i++) {
-      KeyValPair<String, Double> pair = (KeyValPair<String, Double>) sink.collectedTuples.get(i);
+      KeyValPair<String, Double> pair = (KeyValPair<String, Double>)sink.collectedTuples.get(i);
       if (pair.getKey().equals("a")) {
         Assert.assertEquals("a SMA", 31.5, pair.getValue(), 0);
-      }
-      else {
+      } else {
         Assert.assertEquals("b SMA", 52.5, pair.getValue(), 0);
       }
     }
@@ -71,11 +70,10 @@ public class SimpleMovingAverageTest
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 4, sink.collectedTuples.size());
     for (int i = 2; i < 4; i++) {
-      KeyValPair<String, Double> pair = (KeyValPair<String, Double>) sink.collectedTuples.get(i);
+      KeyValPair<String, Double> pair = (KeyValPair<String, Double>)sink.collectedTuples.get(i);
       if (pair.getKey().equals("a")) {
         Assert.assertEquals("a SMA", 32.5, pair.getValue(), 0);
-      }
-      else {
+      } else {
         Assert.assertEquals("b SMA", 53.5, pair.getValue(), 0);
       }
     }
@@ -88,11 +86,10 @@ public class SimpleMovingAverageTest
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 6, sink.collectedTuples.size());
     for (int i = 4; i < 6; i++) {
-      KeyValPair<String, Double> pair = (KeyValPair<String, Double>) sink.collectedTuples.get(i);
+      KeyValPair<String, Double> pair = (KeyValPair<String, Double>)sink.collectedTuples.get(i);
       if (pair.getKey().equals("a")) {
         Assert.assertEquals("a SMA", 33.5, pair.getValue(), 0);
-      }
-      else {
+      } else {
         Assert.assertEquals("b SMA", 54.5, pair.getValue(), 0);
       }
     }
@@ -105,11 +102,10 @@ public class SimpleMovingAverageTest
     oper.endWindow();
     Assert.assertEquals("number emitted tuples", 8, sink.collectedTuples.size());
     for (int i = 6; i < 8; i++) {
-      KeyValPair<String, Double> pair = (KeyValPair<String, Double>) sink.collectedTuples.get(i);
+      KeyValPair<String, Double> pair = (KeyValPair<String, Double>)sink.collectedTuples.get(i);
       if (pair.getKey().equals("a")) {
         Assert.assertEquals("a SMA", 35.5, pair.getValue(), 0);
-      }
-      else {
+      } else {
         Assert.assertEquals("b SMA", 56.5, pair.getValue(), 0);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/multiwindow/SlidingWindowTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/multiwindow/SlidingWindowTest.java b/library/src/test/java/com/datatorrent/lib/multiwindow/SlidingWindowTest.java
index 02fce04..1a59b97 100644
--- a/library/src/test/java/com/datatorrent/lib/multiwindow/SlidingWindowTest.java
+++ b/library/src/test/java/com/datatorrent/lib/multiwindow/SlidingWindowTest.java
@@ -37,23 +37,24 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 public class SlidingWindowTest
 {
 
-	public class TestSlidingWindow extends AbstractSlidingWindow<String, List<String>>
-	{
-		public final transient DefaultOutputPort<ArrayList<String>> out = new DefaultOutputPort<ArrayList<String>>();
+  public class TestSlidingWindow extends AbstractSlidingWindow<String, List<String>>
+  {
+    public final transient DefaultOutputPort<ArrayList<String>> out = new DefaultOutputPort<ArrayList<String>>();
 
-		ArrayList<String> tuples = new ArrayList<String>();
+    ArrayList<String> tuples = new ArrayList<String>();
 
-		@Override protected void processDataTuple(String tuple)
-		{
-			tuples.add(tuple);
-		}
+    @Override
+    protected void processDataTuple(String tuple)
+    {
+      tuples.add(tuple);
+    }
 
-		@Override
-		public void endWindow()
-		{
-			out.emit(tuples);
-			tuples = new ArrayList<String>();
-		}
+    @Override
+    public void endWindow()
+    {
+      out.emit(tuples);
+      tuples = new ArrayList<String>();
+    }
 
     @Override
     public List<String> createWindowState()
@@ -61,47 +62,46 @@ public class SlidingWindowTest
       return tuples;
     }
 
-	};
+  }
 
-	/**
-	 * Test functional logic
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   * Test functional logic
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-	public void testNodeProcessing() throws InterruptedException
-	{
-	  TestSlidingWindow oper = new TestSlidingWindow();
-
-		CollectorTestSink swinSink = new CollectorTestSink();
-		oper.out.setSink(swinSink);
-		oper.setWindowSize(3);
-		oper.setup(null);
-
-		oper.beginWindow(0);
-		oper.data.process("a0");
-		oper.data.process("b0");
-		oper.endWindow();
-
-		oper.beginWindow(1);
-		oper.data.process("a1");
-		oper.data.process("b1");
-		oper.endWindow();
-
-		oper.beginWindow(2);
-		oper.data.process("a2");
-		oper.data.process("b2");
-		oper.endWindow();
-
-		oper.beginWindow(3);
-		oper.data.process("a3");
-		oper.data.process("b3");
-		oper.endWindow();
-
-		Assert.assertEquals("number emitted tuples", 4,
-      swinSink.collectedTuples.size());
-		
-		Assert.assertEquals("Invalid second stream window state.", oper.getStreamingWindowState(1), Lists.newArrayList("a2", "b2"));
-		Assert.assertEquals("Invalid expired stream window state.", oper.lastExpiredWindowState, Lists.newArrayList("a0", "b0"));
-
-	}
+  public void testNodeProcessing() throws InterruptedException
+  {
+    TestSlidingWindow oper = new TestSlidingWindow();
+
+    CollectorTestSink swinSink = new CollectorTestSink();
+    oper.out.setSink(swinSink);
+    oper.setWindowSize(3);
+    oper.setup(null);
+
+    oper.beginWindow(0);
+    oper.data.process("a0");
+    oper.data.process("b0");
+    oper.endWindow();
+
+    oper.beginWindow(1);
+    oper.data.process("a1");
+    oper.data.process("b1");
+    oper.endWindow();
+
+    oper.beginWindow(2);
+    oper.data.process("a2");
+    oper.data.process("b2");
+    oper.endWindow();
+
+    oper.beginWindow(3);
+    oper.data.process("a3");
+    oper.data.process("b3");
+    oper.endWindow();
+
+    Assert.assertEquals("number emitted tuples", 4, swinSink.collectedTuples.size());
+
+    Assert.assertEquals("Invalid second stream window state.", oper.getStreamingWindowState(1), Lists.newArrayList("a2", "b2"));
+    Assert.assertEquals("Invalid expired stream window state.", oper.lastExpiredWindowState, Lists.newArrayList("a0", "b0"));
+
+  }
 }


[10/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNUniqueOperatorMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNUniqueOperatorMap.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNUniqueOperatorMap.java
index 120a22a..34fd332 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNUniqueOperatorMap.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNUniqueOperatorMap.java
@@ -40,13 +40,13 @@ public abstract class AbstractBaseNUniqueOperatorMap<K, V> extends AbstractBaseN
    * Override to decide the direction (ascending vs descending)
    * @return true if ascending
    */
-  abstract public boolean isAscending();
+  public abstract boolean isAscending();
 
   /**
    * Override to decide which port to emit to and its schema
    * @param tuple
    */
-  abstract public void emit(HashMap<K, ArrayList<HashMap<V,Integer>>> tuple);
+  public abstract void emit(HashMap<K, ArrayList<HashMap<V,Integer>>> tuple);
 
   /**
    * Inserts tuples into the queue
@@ -63,8 +63,7 @@ public abstract class AbstractBaseNUniqueOperatorMap<K, V> extends AbstractBaseN
         pqueue = new TopNUniqueSort<V>(5, n, isAscending());
         kmap.put(cloneKey(e.getKey()), pqueue);
         pqueue.offer(cloneValue(e.getValue()));
-      }
-      else {
+      } else {
         pqueue.offer(cloneValue(e.getValue()));
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseSortOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseSortOperator.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseSortOperator.java
index 8454a7d..ac7f816 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseSortOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseSortOperator.java
@@ -115,10 +115,13 @@ public abstract class AbstractBaseSortOperator<K> extends BaseKeyOperator<K>
   }
 
 
-  abstract public boolean doEmitList();
-  abstract public boolean doEmitHash();
-  abstract public void emitToList(ArrayList<K> list);
-  abstract public void emitToHash(HashMap<K,Integer> map);
+  public abstract boolean doEmitList();
+
+  public abstract boolean doEmitHash();
+
+  public abstract void emitToList(ArrayList<K> list);
+
+  public abstract void emitToHash(HashMap<K,Integer> map);
 
   /**
    * Emit sorted tuple at end of window

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractDimensionTimeBucketOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractDimensionTimeBucketOperator.java b/library/src/main/java/com/datatorrent/lib/util/AbstractDimensionTimeBucketOperator.java
index 35efb48..5a52979 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractDimensionTimeBucketOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractDimensionTimeBucketOperator.java
@@ -20,18 +20,25 @@ package com.datatorrent.lib.util;
 
 import java.text.NumberFormat;
 import java.text.ParseException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.DAGContext;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This is the base implementation of an operator.&nbsp;
@@ -79,7 +86,8 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
    * This is the input port which receives tuples that are maps from strings to objects.
    */
   @InputPortFieldAnnotation(optional = false)
-  public final transient DefaultInputPort<Map<String, Object>> in = new DefaultInputPort<Map<String, Object>>() {
+  public final transient DefaultInputPort<Map<String, Object>> in = new DefaultInputPort<Map<String, Object>>()
+  {
     @Override
     public void process(Map<String, Object> tuple)
     {
@@ -107,7 +115,6 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
           timeBucketList.add(String.format("m|%04d%02d%02d%02d%02d", calendar.get(Calendar.YEAR), calendar.get(Calendar.MONTH) + 1, calendar.get(Calendar.DAY_OF_MONTH), calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE)));
         }
 
-        // System.out.println(dimensionCombinations.size()+ " testing");
         for (String timeBucket : timeBucketList) {
           for (int[] dimensionCombination : dimensionCombinations) {
             String field = "0";
@@ -146,7 +153,7 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
   private List<String> valueKeyNames = new ArrayList<String>();
   private String timeKeyName;
   private long currentWindowId;
-  private long windowWidth =500;
+  private long windowWidth = 500;
   private int timeBucketFlags;
   private transient TimeZone timeZone = TimeZone.getTimeZone("GMT");
   private transient Calendar calendar = new GregorianCalendar(timeZone);
@@ -166,7 +173,7 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
     if (timeKeyName == null) {
       time = (currentWindowId >>> 32) * 1000 + windowWidth * (currentWindowId & 0xffffffffL);
     } else {
-      time = (Long) tuple.get(timeKeyName);
+      time = (Long)tuple.get(timeKeyName);
     }
     return time;
   }
@@ -174,13 +181,14 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
   protected Number extractNumber(String valueKeyName, Object value)
   {
     if (value instanceof Number) {
-      return (Number) value;
+      return (Number)value;
     } else if (value == null) {
       return new Long(0);
     } else {
       try {
         return numberFormat.parse(value.toString());
       } catch (ParseException ex) {
+        //Fixme
       }
     }
     return new Long(0);
@@ -227,8 +235,9 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
   public void setup(OperatorContext context)
   {
     super.setup(context);
-    if(context != null)
+    if (context != null) {
       windowWidth = context.getValue(DAGContext.STREAMING_WINDOW_SIZE_MILLIS);
+    }
     if (dimensionCombinations.isEmpty() && dimensionCombinationsSet == null) {
       dimensionCombinations.add(null);
       for (int i = 1; i <= dimensionKeyNames.size(); i++) {
@@ -236,7 +245,7 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
       }
     } else if (dimensionCombinationsSet != null) {
       for (Set<String> keySet : dimensionCombinationsSet) {
-        int indexKeys[] = new int[keySet.size()];
+        int[] indexKeys = new int[keySet.size()];
         int i = 0;
         for (String key : keySet) {
           indexKeys[i] = -1;
@@ -247,7 +256,7 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
             }
           }
           if (indexKeys[i] < 0) {
-
+            //fixme
           }
           i++;
         }
@@ -385,7 +394,7 @@ public abstract class AbstractDimensionTimeBucketOperator extends BaseOperator
 
     public static void main(String[] args)
     {
-      String[] list = new String[] { "a", "b", "c", "d", "e" };
+      String[] list = new String[]{"a", "b", "c", "d", "e"};
       for (int i = 1; i <= list.length; i++) {
         logger.info("Combinations: {}", getCombinations(Arrays.asList(list), i));
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java b/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
index b474535..dab5720 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
@@ -22,12 +22,12 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.List;
 
-import com.datatorrent.api.StorageAgent;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.datatorrent.api.Attribute.AttributeMap;
 import com.datatorrent.api.Context.DAGContext;
+import com.datatorrent.api.StorageAgent;
 
 /**
  * Abstract implementation of {@link ApplicationAwareStorageAgent} which can be

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/ActiveMQMessageListener.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/ActiveMQMessageListener.java b/library/src/main/java/com/datatorrent/lib/util/ActiveMQMessageListener.java
index 63136b1..80cd7b0 100644
--- a/library/src/main/java/com/datatorrent/lib/util/ActiveMQMessageListener.java
+++ b/library/src/main/java/com/datatorrent/lib/util/ActiveMQMessageListener.java
@@ -19,11 +19,21 @@
 package com.datatorrent.lib.util;
 
 import java.util.HashMap;
-import javax.jms.*;
-import org.apache.activemq.ActiveMQConnectionFactory;
+
+import javax.jms.Connection;
+import javax.jms.Destination;
+import javax.jms.JMSException;
+import javax.jms.Message;
+import javax.jms.MessageConsumer;
+import javax.jms.MessageListener;
+import javax.jms.Session;
+import javax.jms.Topic;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.activemq.ActiveMQConnectionFactory;
+
 /**
  *  This is the AcctiveMQ message listener (consumer) outside of Malhar/Hadoop.
  *
@@ -154,8 +164,7 @@ public class ActiveMQMessageListener implements MessageListener, Runnable
       try {
         logger.warn("Reached maximum receive messages of {}", maximumReceiveMessages);
         consumer.setMessageListener(null);
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         logger.debug(ex.getLocalizedMessage());
       }
       return;
@@ -167,8 +176,7 @@ public class ActiveMQMessageListener implements MessageListener, Runnable
   {
     try {
       Thread.sleep(2000);  // how long this should be?
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       logger.debug(ex.getLocalizedMessage());
     }
   }
@@ -179,8 +187,7 @@ public class ActiveMQMessageListener implements MessageListener, Runnable
       consumer.close();
       session.close();
       connection.close();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       logger.debug(ex.getLocalizedMessage());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/ActiveMQMultiTypeMessageListener.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/ActiveMQMultiTypeMessageListener.java b/library/src/main/java/com/datatorrent/lib/util/ActiveMQMultiTypeMessageListener.java
index 5c95442..9ab8fb7 100644
--- a/library/src/main/java/com/datatorrent/lib/util/ActiveMQMultiTypeMessageListener.java
+++ b/library/src/main/java/com/datatorrent/lib/util/ActiveMQMultiTypeMessageListener.java
@@ -21,7 +21,14 @@ package com.datatorrent.lib.util;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Map;
-import javax.jms.*;
+
+import javax.jms.BytesMessage;
+import javax.jms.JMSException;
+import javax.jms.MapMessage;
+import javax.jms.Message;
+import javax.jms.ObjectMessage;
+import javax.jms.TextMessage;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,15 +46,13 @@ public class ActiveMQMultiTypeMessageListener extends ActiveMQMessageListener
       String msg = null;
       try {
         msg = txtMsg.getText();
-        receivedData.put(new Integer(countMessages), msg);
-      }
-      catch (JMSException ex) {
+        receivedData.put(countMessages, msg);
+      } catch (JMSException ex) {
         logger.debug(ex.getLocalizedMessage());
       }
 
       logger.debug("Received a TextMessage: {}", msg);
-    }
-    else if (message instanceof MapMessage) {
+    } else if (message instanceof MapMessage) {
       MapMessage mapMsg = (MapMessage)message;
       Map map = new HashMap();
       try {
@@ -56,41 +61,34 @@ public class ActiveMQMultiTypeMessageListener extends ActiveMQMessageListener
           String key = (String)en.nextElement();
           map.put(key, mapMsg.getObject(key));
         }
-        receivedData.put(new Integer(countMessages), map);
+        receivedData.put(countMessages, map);
 
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         logger.debug(ex.getLocalizedMessage());
       }
       logger.debug("Received a MapMessage: {}", map);
-    }
-    else if (message instanceof BytesMessage) {
+    } else if (message instanceof BytesMessage) {
       BytesMessage bytesMsg = (BytesMessage)message;
       try {
         byte[] byteArr = new byte[(int)bytesMsg.getBodyLength()];
         bytesMsg.readBytes(byteArr);
-        receivedData.put(new Integer(countMessages), byteArr);
-      }
-      catch (JMSException ex) {
+        receivedData.put(countMessages, byteArr);
+      } catch (JMSException ex) {
         logger.debug(ex.getLocalizedMessage());
       }
       logger.debug("Received a ByteMessage: {}", bytesMsg);
 
-    }
-    else if (message instanceof ObjectMessage) {
+    } else if (message instanceof ObjectMessage) {
       ObjectMessage objMsg = (ObjectMessage)message;
       Object msg = null;
       try {
         msg = objMsg.getObject();
-        receivedData.put(new Integer(countMessages), msg);
-      }
-      catch (JMSException ex) {
+        receivedData.put(countMessages, msg);
+      } catch (JMSException ex) {
         logger.debug(ex.getLocalizedMessage());
       }
       logger.debug("Received an ObjectMessage: {}", msg);
-    }
-
-    else {
+    } else {
       throw new IllegalArgumentException("Unhandled message type " + message.getClass().getName());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/ArrayHashMapFrequent.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/ArrayHashMapFrequent.java b/library/src/main/java/com/datatorrent/lib/util/ArrayHashMapFrequent.java
index 3ad1934..d5be753 100644
--- a/library/src/main/java/com/datatorrent/lib/util/ArrayHashMapFrequent.java
+++ b/library/src/main/java/com/datatorrent/lib/util/ArrayHashMapFrequent.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This is an abstract operator which consumes key value pairs,
  * where the key is an object and the value is an integer.&nbsp;
@@ -62,11 +62,9 @@ public class ArrayHashMapFrequent<K> implements Unifier<HashMap<K, Integer>>
         lval = e.getValue();
         break;
       }
-    }
-    else {
+    } else {
       for (Map.Entry<K, Integer> e: tuple.entrySet()) {
-        if ((least && (e.getValue() < lval))
-                || (!least && (e.getValue() > lval))) {
+        if ((least && (e.getValue() < lval)) || (!least && (e.getValue() > lval))) {
           mergedTuple.clear();
           mergedTuple.put(e.getKey(), e.getValue());
           break;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/BaseFilteredKeyValueOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/BaseFilteredKeyValueOperator.java b/library/src/main/java/com/datatorrent/lib/util/BaseFilteredKeyValueOperator.java
index 99306d4..43af8d1 100644
--- a/library/src/main/java/com/datatorrent/lib/util/BaseFilteredKeyValueOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/BaseFilteredKeyValueOperator.java
@@ -68,7 +68,7 @@ public class BaseFilteredKeyValueOperator<K, V> extends BaseKeyValueOperator<K,
    * setter function for filterBy
    * @param list list of keys for subtoken filters
    */
-  public void setFilterBy(K [] list)
+  public void setFilterBy(K[] list)
   {
     if (list != null) {
       for (K s: list) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/BaseKeyValueOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/BaseKeyValueOperator.java b/library/src/main/java/com/datatorrent/lib/util/BaseKeyValueOperator.java
index 5fb806e..69c34ca 100644
--- a/library/src/main/java/com/datatorrent/lib/util/BaseKeyValueOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/BaseKeyValueOperator.java
@@ -22,9 +22,8 @@ import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.lib.codec.JavaSerializationStreamCodec;
-
 import com.datatorrent.api.StreamCodec;
+import com.datatorrent.lib.codec.JavaSerializationStreamCodec;
 
 /**
  * This is an abstract operator that allows cloneKey and cloneValue to allow users to use mutable objects.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/BaseLineTokenizer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/BaseLineTokenizer.java b/library/src/main/java/com/datatorrent/lib/util/BaseLineTokenizer.java
index 64e582e..7098af4 100644
--- a/library/src/main/java/com/datatorrent/lib/util/BaseLineTokenizer.java
+++ b/library/src/main/java/com/datatorrent/lib/util/BaseLineTokenizer.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-
 import javax.validation.constraints.NotNull;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * This is an operator which consumes strings and splits them into tokens and sub-tokens.
  * <p>
@@ -168,13 +168,12 @@ public abstract class BaseLineTokenizer extends BaseOperator
     }
     beginProcessSubTokens();
     if (splitTokenBy.isEmpty()) {
-        processSubToken(tok);
-    }
-    else {
+      processSubToken(tok);
+    } else {
       String[] subtoks = tok.split(splitTokenBy);
       int i = 0;
       for (String subtok: subtoks) {
-        if ((i ==0) && !validSubTokenKey(subtok)) { // first subtoken is the key
+        if ((i == 0) && !validSubTokenKey(subtok)) { // first subtoken is the key
           break;
         }
         processSubToken(subtok);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/BaseMatchOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/BaseMatchOperator.java b/library/src/main/java/com/datatorrent/lib/util/BaseMatchOperator.java
index 299d773..69ad0b5 100644
--- a/library/src/main/java/com/datatorrent/lib/util/BaseMatchOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/BaseMatchOperator.java
@@ -55,9 +55,10 @@ public class BaseMatchOperator<K, V> extends BaseKeyValueOperator<K, V>
   @Pattern(regexp = "lte|lt|eq|ne|gt|gte", message = "Value has to be one of lte, lt, eq, ne, gt, gte")
   private double value = 0.0;
 
-  public enum ComparatorFunction {
+  public enum ComparatorFunction
+  {
     LTE, LT, EQ, NEQ, GT, GTE
-  };
+  }
 
   ComparatorFunction comparator = ComparatorFunction.EQ;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/BaseNumberKeyValueOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/BaseNumberKeyValueOperator.java b/library/src/main/java/com/datatorrent/lib/util/BaseNumberKeyValueOperator.java
index 59b4138..10786c4 100644
--- a/library/src/main/java/com/datatorrent/lib/util/BaseNumberKeyValueOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/BaseNumberKeyValueOperator.java
@@ -40,7 +40,8 @@ public class BaseNumberKeyValueOperator<K,V extends Number> extends BaseFiltered
   public enum V_TYPE
   {
     DOUBLE, INTEGER, FLOAT, LONG, SHORT, UNKNOWN
-  };
+  }
+
   @NotNull
   V_TYPE type = V_TYPE.DOUBLE;
 
@@ -53,20 +54,15 @@ public class BaseNumberKeyValueOperator<K,V extends Number> extends BaseFiltered
   {
     if (ctype == Double.class) {
       type = V_TYPE.DOUBLE;
-    }
-    else if (ctype == Integer.class) {
+    } else if (ctype == Integer.class) {
       type = V_TYPE.INTEGER;
-    }
-    else if (ctype == Float.class) {
+    } else if (ctype == Float.class) {
       type = V_TYPE.FLOAT;
-    }
-    else if (ctype == Long.class) {
+    } else if (ctype == Long.class) {
       type = V_TYPE.LONG;
-    }
-    else if (ctype == Short.class) {
+    } else if (ctype == Short.class) {
       type = V_TYPE.SHORT;
-    }
-    else {
+    } else {
       type = V_TYPE.UNKNOWN;
     }
   }
@@ -79,27 +75,27 @@ public class BaseNumberKeyValueOperator<K,V extends Number> extends BaseFiltered
    * @return value as a correct sub-class (V) object
    */
   @SuppressWarnings("unchecked")
-	public V getValue(Number num)
+  public V getValue(Number num)
   {
     Number val;
     switch (type) {
       case DOUBLE:
-        val = new Double(num.doubleValue());
+        val = num.doubleValue();
         break;
       case INTEGER:
-        val = new Integer(num.intValue());
+        val = num.intValue();
         break;
       case FLOAT:
-        val = new Float(num.floatValue());
+        val = num.floatValue();
         break;
       case LONG:
-        val = new Long(num.longValue());
+        val = num.longValue();
         break;
       case SHORT:
-        val = new Short(num.shortValue());
+        val = num.shortValue();
         break;
       default:
-        val = new Double(num.doubleValue());
+        val = num.doubleValue();
         break;
     }
     return (V)val;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/BaseNumberValueOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/BaseNumberValueOperator.java b/library/src/main/java/com/datatorrent/lib/util/BaseNumberValueOperator.java
index 91d52a6..4dd8a5a 100644
--- a/library/src/main/java/com/datatorrent/lib/util/BaseNumberValueOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/BaseNumberValueOperator.java
@@ -38,7 +38,8 @@ public class BaseNumberValueOperator<V extends Number> extends BaseKeyOperator<V
   public enum V_TYPE
   {
     DOUBLE, INTEGER, FLOAT, LONG, SHORT, UNKNOWN
-  };
+  }
+
   @NotNull
   protected V_TYPE type = V_TYPE.DOUBLE;
 
@@ -72,20 +73,15 @@ public class BaseNumberValueOperator<V extends Number> extends BaseKeyOperator<V
   {
     if (ctype == Double.class) {
       type = V_TYPE.DOUBLE;
-    }
-    else if (ctype == Integer.class) {
+    } else if (ctype == Integer.class) {
       type = V_TYPE.INTEGER;
-    }
-    else if (ctype == Float.class) {
+    } else if (ctype == Float.class) {
       type = V_TYPE.FLOAT;
-    }
-    else if (ctype == Long.class) {
+    } else if (ctype == Long.class) {
       type = V_TYPE.LONG;
-    }
-    else if (ctype == Short.class) {
+    } else if (ctype == Short.class) {
       type = V_TYPE.SHORT;
-    }
-    else {
+    } else {
       type = V_TYPE.UNKNOWN;
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperator.java b/library/src/main/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperator.java
index 65c62b9..f44b764 100644
--- a/library/src/main/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperator.java
@@ -20,10 +20,12 @@ package com.datatorrent.lib.util;
 
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.lang.mutable.MutableDouble;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.mutable.MutableDouble;
+
 /**
  * This operator accumulates the values of "value" fields for different time and dimensions and emits the accumulated values as a map.&nbsp;
  * The emitted map's keys are a combination of the time and dimension fields,
@@ -51,8 +53,7 @@ public class DimensionTimeBucketSumOperator extends AbstractDimensionTimeBucketO
       m = new HashMap<String, Number>();
       m.put(field, new MutableDouble(value));
       dataMap.put(finalKey, m);
-    }
-    else {
+    } else {
       Number n = m.get(field);
       if (n == null) {
         m.put(field, new MutableDouble(value));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/FieldInfo.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/FieldInfo.java b/library/src/main/java/com/datatorrent/lib/util/FieldInfo.java
index d70ca0c..8153654 100644
--- a/library/src/main/java/com/datatorrent/lib/util/FieldInfo.java
+++ b/library/src/main/java/com/datatorrent/lib/util/FieldInfo.java
@@ -93,9 +93,10 @@ public class FieldInfo
   @Override
   public boolean equals(Object obj)
   {
-    if (obj == null || !(obj instanceof FieldInfo))
+    if (obj == null || !(obj instanceof FieldInfo)) {
       return false;
-    return columnName.equalsIgnoreCase(((FieldInfo) obj).getColumnName());
+    }
+    return columnName.equalsIgnoreCase(((FieldInfo)obj).getColumnName());
   }
 
   /**
@@ -115,8 +116,10 @@ public class FieldInfo
     this.type = type;
   }
 
-  public static enum SupportType {
-    BOOLEAN(Boolean.class), SHORT(Short.class), INTEGER(Integer.class), LONG(Long.class), FLOAT(Float.class), DOUBLE(Double.class), STRING(String.class), OBJECT(Object.class);
+  public static enum SupportType
+  {
+    BOOLEAN(Boolean.class), SHORT(Short.class), INTEGER(Integer.class), LONG(Long.class),
+    FLOAT(Float.class), DOUBLE(Double.class), STRING(String.class), OBJECT(Object.class);
 
     private Class javaType;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/FilterOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/FilterOperator.java b/library/src/main/java/com/datatorrent/lib/util/FilterOperator.java
index 04055c4..772e0b3 100644
--- a/library/src/main/java/com/datatorrent/lib/util/FilterOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/FilterOperator.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This is the base implementation of an operator, which consumes tuples.&nbsp;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/JavaScriptFilterOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/JavaScriptFilterOperator.java b/library/src/main/java/com/datatorrent/lib/util/JavaScriptFilterOperator.java
index e5a7671..b0179b9 100644
--- a/library/src/main/java/com/datatorrent/lib/util/JavaScriptFilterOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/JavaScriptFilterOperator.java
@@ -20,7 +20,13 @@ package com.datatorrent.lib.util;
 
 import java.util.Map;
 
-import javax.script.*;
+import javax.script.Invocable;
+import javax.script.ScriptContext;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+import javax.script.ScriptException;
+import javax.script.SimpleBindings;
+import javax.script.SimpleScriptContext;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -83,8 +89,7 @@ public class JavaScriptFilterOperator extends FilterOperator
       if (setupScript != null) {
         engine.eval(setupScript, this.scriptContext);
       }
-    }
-    catch (ScriptException ex) {
+    } catch (ScriptException ex) {
       LOG.error("script \"{}\" has error", setupScript);
       throw new RuntimeException(ex);
     }
@@ -104,22 +109,18 @@ public class JavaScriptFilterOperator extends FilterOperator
       Object result = ((Invocable)engine).invokeFunction(functionName);
       if (result instanceof Boolean) {
         return (Boolean)result;
-      }
-      else if (result instanceof Integer) {
+      } else if (result instanceof Integer) {
         return ((Integer)result) != 0;
-      }
-      else if (result instanceof Long) {
+      } else if (result instanceof Long) {
         return ((Long)result) != 0;
-      }
-      else if (result instanceof String) {
+      } else if (result instanceof String) {
         return Boolean.getBoolean((String)result);
-      }
-      else {
-        LOG.warn("The script result (type: {}) cannot be converted to boolean. Returning false.", result == null ? "null" : result.getClass().getName());
+      } else {
+        LOG.warn("The script result (type: {}) cannot be converted to boolean. Returning false.",
+            result == null ? "null" : result.getClass().getName());
         return false;
       }
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/KeyHashValPair.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/KeyHashValPair.java b/library/src/main/java/com/datatorrent/lib/util/KeyHashValPair.java
index 87cfd69..983d120 100644
--- a/library/src/main/java/com/datatorrent/lib/util/KeyHashValPair.java
+++ b/library/src/main/java/com/datatorrent/lib/util/KeyHashValPair.java
@@ -53,7 +53,7 @@ public class KeyHashValPair<K, V> extends KeyValPair<K, V>
     if (!(o instanceof Map.Entry)) {
       return false;
     }
-    Map.Entry e = (Map.Entry) o;
+    Map.Entry e = (Map.Entry)o;
     return (this.getKey() == null ? e.getKey() == null : this.getKey().equals(e.getKey()));
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/PojoUtils.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/PojoUtils.java b/library/src/main/java/com/datatorrent/lib/util/PojoUtils.java
index c4b3daa..2bdad86 100644
--- a/library/src/main/java/com/datatorrent/lib/util/PojoUtils.java
+++ b/library/src/main/java/com/datatorrent/lib/util/PojoUtils.java
@@ -18,18 +18,6 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.lib.expression.Expression;
-import com.datatorrent.lib.expression.JavaExpressionParser;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-import org.apache.commons.lang3.ClassUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.codehaus.commons.compiler.CompileException;
-import org.codehaus.commons.compiler.CompilerFactoryFactory;
-import org.codehaus.commons.compiler.IScriptEvaluator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.util.ArrayList;
@@ -37,6 +25,21 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
+import org.codehaus.commons.compiler.CompileException;
+import org.codehaus.commons.compiler.CompilerFactoryFactory;
+import org.codehaus.commons.compiler.IScriptEvaluator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.lang3.ClassUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+
+import com.datatorrent.lib.expression.Expression;
+import com.datatorrent.lib.expression.JavaExpressionParser;
+
 /**
  * @since 2.1.0
  */
@@ -135,7 +138,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterBoolean<T> createGetterBoolean(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterBoolean<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, boolean.class, GetterBoolean.class);
+    return (GetterBoolean<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, boolean.class,
+        GetterBoolean.class);
   }
 
   public static <T> GetterByte<T> createGetterByte(Class<? extends T> pojoClass, String getterExpr)
@@ -146,7 +150,7 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterByte<T> createGetterByte(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterByte<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, byte.class, GetterByte.class);
+    return (GetterByte<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, byte.class, GetterByte.class);
   }
 
   public static <T> GetterChar<T> createGetterChar(Class<? extends T> pojoClass, String getterExpr)
@@ -157,7 +161,7 @@ public class PojoUtils
   @SuppressWarnings({ "unchecked"})
   public static <T> GetterChar<T> createGetterChar(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterChar<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, char.class, GetterChar.class);
+    return (GetterChar<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, char.class, GetterChar.class);
   }
 
   public static <T> GetterShort<T> createGetterShort(Class<? extends T> pojoClass, String getterExpr)
@@ -168,7 +172,7 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterShort<T> createGetterShort(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterShort<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, short.class, GetterShort.class);
+    return (GetterShort<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, short.class, GetterShort.class);
   }
 
   public static <T> GetterInt<T> createGetterInt(Class<? extends T> pojoClass, String getterExpr)
@@ -179,7 +183,7 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterInt<T> createGetterInt(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterInt<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, int.class, GetterInt.class);
+    return (GetterInt<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, int.class, GetterInt.class);
   }
 
   public static <T> GetterLong<T> createGetterLong(Class<? extends T> pojoClass, String getterExpr)
@@ -190,7 +194,7 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterLong<T> createGetterLong(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterLong<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, long.class, GetterLong.class);
+    return (GetterLong<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, long.class, GetterLong.class);
   }
 
   public static <T> GetterFloat<T> createGetterFloat(Class<? extends T> pojoClass, String getterExpr)
@@ -201,7 +205,7 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterFloat<T> createGetterFloat(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterFloat<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, float.class, GetterFloat.class);
+    return (GetterFloat<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, float.class, GetterFloat.class);
   }
 
   public static <T> GetterDouble<T> createGetterDouble(Class<? extends T> pojoClass, String getterExpr)
@@ -212,7 +216,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> GetterDouble<T> createGetterDouble(Class<? extends T> pojoClass, String getterExpr, String exprObjectPlaceholder)
   {
-    return (GetterDouble<T>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, double.class, GetterDouble.class);
+    return (GetterDouble<T>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, double.class,
+        GetterDouble.class);
   }
 
   public static <T, V> Getter<T, V> createGetter(Class<? extends T> pojoClass, String getterExpr, Class<? extends V> exprClass)
@@ -227,7 +232,7 @@ public class PojoUtils
       throw new IllegalArgumentException("createGetter does not allow primitive class \"" + exprClass.getName() +
               "\" for exprClass argument. Use createGetter" + upperCaseWord(exprClass.getName()) + " or constructGetter().");
     }
-    return (Getter<T, V>) createGetter(pojoClass, getterExpr, exprObjectPlaceholder, exprClass, Getter.class);
+    return (Getter<T, V>)createGetter(pojoClass, getterExpr, exprObjectPlaceholder, exprClass, Getter.class);
   }
 
   public static Object constructGetter(Class<?> pojoClass, String getterExpr, Class<?> exprClass)
@@ -334,7 +339,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterBoolean<T> createSetterBoolean(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterBoolean<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, boolean.class, SetterBoolean.class);
+    return (SetterBoolean<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder,
+        boolean.class, SetterBoolean.class);
   }
 
   public static <T> SetterByte<T> createSetterByte(Class<? extends T> pojoClass, String setterExpr)
@@ -345,7 +351,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterByte<T> createSetterByte(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterByte<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, byte.class, SetterByte.class);
+    return (SetterByte<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, byte.class,
+        SetterByte.class);
   }
 
   public static <T> SetterChar<T> createSetterChar(Class<? extends T> pojoClass, String setterExpr)
@@ -356,7 +363,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterChar<T> createSetterChar(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterChar<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, char.class, SetterChar.class);
+    return (SetterChar<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, char.class,
+        SetterChar.class);
   }
 
   public static <T> SetterShort<T> createSetterShort(Class<? extends T> pojoClass, String setterExpr)
@@ -367,7 +375,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterShort<T> createSetterShort(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterShort<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, short.class, SetterShort.class);
+    return (SetterShort<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, short.class,
+        SetterShort.class);
   }
 
   public static <T> SetterInt<T> createSetterInt(Class<? extends T> pojoClass, String setterExpr)
@@ -378,7 +387,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterInt<T> createSetterInt(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterInt<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, int.class, SetterInt.class);
+    return (SetterInt<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, int.class,
+        SetterInt.class);
   }
 
   public static <T> SetterLong<T> createSetterLong(Class<? extends T> pojoClass, String setterExpr)
@@ -389,7 +399,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterLong<T> createSetterLong(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterLong<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, long.class, SetterLong.class);
+    return (SetterLong<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, long.class,
+        SetterLong.class);
   }
 
   public static <T> SetterFloat<T> createSetterFloat(Class<? extends T> pojoClass, String setterExpr)
@@ -400,7 +411,8 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterFloat<T> createSetterFloat(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterFloat<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, float.class, SetterFloat.class);
+    return (SetterFloat<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, float.class,
+        SetterFloat.class);
   }
 
   public static <T> SetterDouble<T> createSetterDouble(Class<? extends T> pojoClass, String setterExpr)
@@ -411,25 +423,28 @@ public class PojoUtils
   @SuppressWarnings("unchecked")
   public static <T> SetterDouble<T> createSetterDouble(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder)
   {
-    return (SetterDouble<T>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, double.class, SetterDouble.class);
+    return (SetterDouble<T>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder,
+        double.class, SetterDouble.class);
   }
 
-  public static <T, V> Setter<T, V> createSetter(Class<? extends T>pojoClass, String setterExpr, Class<? extends V> exprClass)
+  public static <T, V> Setter<T, V> createSetter(Class<? extends T> pojoClass, String setterExpr, Class<? extends V> exprClass)
   {
     return createSetter(pojoClass, setterExpr, DEFAULT_EXP_OBJECT_PLACEHOLDER, DEFAULT_EXP_VAL_PLACEHOLDER, exprClass);
   }
 
   @SuppressWarnings("unchecked")
-  public static <T, V> Setter<T, V> createSetter(Class<? extends T>pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder, Class<? extends V> exprClass)
+  public static <T, V> Setter<T, V> createSetter(Class<? extends T> pojoClass, String setterExpr, String exprObjectPlaceholder, String exprValuePlaceholder, Class<? extends V> exprClass)
   {
     if (primitiveClassToSetterInterface.get(exprClass) != null) {
       throw new IllegalArgumentException("createSetter does not allow primitive class \"" + exprClass.getName() +
               "\" for exprClass argument. Use createSetter" + upperCaseWord(exprClass.getName()) + " or constructSetter().");
     }
-    return (Setter<T, V>) createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, exprClass, Setter.class);
+    return (Setter<T, V>)createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValuePlaceholder, exprClass,
+        Setter.class);
   }
 
-  public static Object constructSetter(Class<?> pojoClass, String setterExpr, Class<?> exprClass) {
+  public static Object constructSetter(Class<?> pojoClass, String setterExpr, Class<?> exprClass)
+  {
     return constructSetter(pojoClass, setterExpr, DEFAULT_EXP_OBJECT_PLACEHOLDER, DEFAULT_EXP_VAL_PLACEHOLDER, exprClass);
   }
 
@@ -449,35 +464,42 @@ public class PojoUtils
     return createSetter(pojoClass, setterExpr, exprObjectPlaceholder, exprValPlaceholder, exprClass, interfaceToImplement);
   }
 
-  private static class JavaStatement {
+  private static class JavaStatement
+  {
     private final StringBuilder javaStatement;
     private final int capacity;
 
-    private JavaStatement() {
+    private JavaStatement()
+    {
       javaStatement = new StringBuilder();
       capacity = javaStatement.capacity();
     }
 
-    private JavaStatement(int length) {
+    private JavaStatement(int length)
+    {
       javaStatement = new StringBuilder(length);
       capacity = javaStatement.capacity();
     }
 
     @Override
-    public String toString() {
+    public String toString()
+    {
       return javaStatement.toString();
     }
 
-    protected JavaStatement append(String string) {
+    protected JavaStatement append(String string)
+    {
       javaStatement.append(string);
       return this;
     }
 
-    private JavaStatement appendCastToTypeExpr(Class<?> type, String expr) {
+    private JavaStatement appendCastToTypeExpr(Class<?> type, String expr)
+    {
       return append("((").append(type.getName()).append(")(").append(expr).append("))");
     }
 
-    protected String getStatement() {
+    protected String getStatement()
+    {
       if (capacity < javaStatement.length() + 1) {
         logger.debug("Java statement capacity {} was not sufficient for the statement length {}. Actual statement {}", capacity, javaStatement.length(), javaStatement);
       }
@@ -485,19 +507,23 @@ public class PojoUtils
     }
   }
 
-  private static class JavaReturnStatement extends JavaStatement {
-    private JavaReturnStatement(Class<?> returnType) {
+  private static class JavaReturnStatement extends JavaStatement
+  {
+    private JavaReturnStatement(Class<?> returnType)
+    {
       super();
       append("return (").append(returnType.getName()).append(")");
     }
 
-    private JavaReturnStatement(int length, Class<?> returnType) {
+    private JavaReturnStatement(int length, Class<?> returnType)
+    {
       super(length);
       append("return ((").append(returnType.getName()).append(")");
     }
 
     @Override
-    protected String getStatement() {
+    protected String getStatement()
+    {
       append(")");
       return super.getStatement();
     }
@@ -543,13 +569,10 @@ public class PojoUtils
         return code.append(methodName).append("()").getStatement();
       }
       logger.debug("method {} of the {} returns {} that can not be assigned to {}. Proceeding to locate another getter method.",
-              pojoClass, methodName, method.getReturnType(), exprClass);
-    } catch (NoSuchMethodException ex) {
-      logger.debug("{} does not have method {}. Proceeding to locate another getter method.",
-              pojoClass, methodName);
-    } catch (SecurityException ex) {
+          pojoClass, methodName, method.getReturnType(), exprClass);
+    } catch (NoSuchMethodException | SecurityException ex) {
       logger.debug("{} does not have method {}. Proceeding to locate another getter method.",
-              pojoClass, methodName);
+          pojoClass, methodName);
     }
 
     methodName = IS + upperCaseWord(fieldExpression);
@@ -559,13 +582,10 @@ public class PojoUtils
         return code.append(methodName).append("()").getStatement();
       }
       logger.debug("method {} of the {} returns {} that can not be assigned to {}. Proceeding with the original expression {}.",
-              pojoClass, methodName, method.getReturnType(), exprClass, fieldExpression);
-    } catch (NoSuchMethodException ex) {
+          pojoClass, methodName, method.getReturnType(), exprClass, fieldExpression);
+    } catch (NoSuchMethodException | SecurityException ex) {
       logger.debug("{} does not have method {}. Proceeding with the original expression {}.",
-              pojoClass, methodName, fieldExpression);
-    } catch (SecurityException ex) {
-      logger.debug("{} does not have method {}. Proceeding with the original expression {}.",
-              pojoClass, methodName, fieldExpression);
+          pojoClass, methodName, fieldExpression);
     }
 
     return code.append(fieldExpression).getStatement();
@@ -612,8 +632,7 @@ public class PojoUtils
           if (exprClass == parameterTypes[0]) {
             bestMatchMethod = method;
             break;
-          }
-          else if (ClassUtils.isAssignable(exprClass, parameterTypes[0])) {
+          } else if (ClassUtils.isAssignable(exprClass, parameterTypes[0])) {
             candidates.add(method);
           }
         }
@@ -623,7 +642,7 @@ public class PojoUtils
     if (bestMatchMethod == null) { // We did not find the exact match, use candidates to find the match
       if (candidates.size() == 0) {
         logger.debug("{} does not have suitable setter method {}. Returning original expression {}.",
-                pojoClass, setMethodName, fieldExpression);
+            pojoClass, setMethodName, fieldExpression);
         /* We did not find any match at all, use original expression */
         /* append = (<expr type>)val;*/
         return code.append(fieldExpression).append(" = ").appendCastToTypeExpr(exprClass, VAL).getStatement();
@@ -654,12 +673,11 @@ public class PojoUtils
 
 
     String code = StringUtils.replaceEach(setterExpr, new String[]{exprObjectPlaceholder, exprValPlaceholder},
-            new String[]{new JavaStatement().appendCastToTypeExpr(pojoClass, OBJECT).toString(), new JavaStatement().appendCastToTypeExpr(exprClass, VAL).toString()});
+        new String[]{new JavaStatement().appendCastToTypeExpr(pojoClass, OBJECT).toString(), new JavaStatement().appendCastToTypeExpr(exprClass, VAL).toString()});
     if (code != setterExpr) {
       code = new JavaStatement(code.length() + 1).append(code).getStatement();
       logger.debug("Original expression {} is a complex expression. Replacing it with {}.", setterExpr, code);
-    }
-    else {
+    } else {
       code = getSingleFieldSetterExpression(pojoClass, setterExpr, exprClass);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/ReusableStringReader.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/ReusableStringReader.java b/library/src/main/java/com/datatorrent/lib/util/ReusableStringReader.java
index 3503043..9be3a2f 100644
--- a/library/src/main/java/com/datatorrent/lib/util/ReusableStringReader.java
+++ b/library/src/main/java/com/datatorrent/lib/util/ReusableStringReader.java
@@ -41,8 +41,9 @@ public class ReusableStringReader extends Reader
     } else if (len == 0) {
       return 0;
     }
-    if (next >= length)
+    if (next >= length) {
       return -1;
+    }
     int n = Math.min(length - next, len);
     str.getChars(next, next + n, cbuf, off);
     next += n;
@@ -58,8 +59,9 @@ public class ReusableStringReader extends Reader
   public int read() throws IOException
   {
     ensureOpen();
-    if (next >= length)
+    if (next >= length) {
       return -1;
+    }
     return str.charAt(next++);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/ReversibleComparator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/ReversibleComparator.java b/library/src/main/java/com/datatorrent/lib/util/ReversibleComparator.java
index bb01f76..d533111 100644
--- a/library/src/main/java/com/datatorrent/lib/util/ReversibleComparator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/ReversibleComparator.java
@@ -44,6 +44,7 @@ public class ReversibleComparator<E> implements Comparator<E>
   {
     ascending = flag;
   }
+
   public boolean ascending = true;
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/TableInfo.java b/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
index e59de59..b0d454d 100644
--- a/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
+++ b/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
@@ -25,7 +25,7 @@ import javax.validation.constraints.NotNull;
 /**
  * @since 3.3.0
  */
-public class TableInfo< T extends FieldInfo >
+public class TableInfo<T extends FieldInfo>
 {
   //the row or id expression
   private String rowOrIdExpression;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/TimeBucketKey.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/TimeBucketKey.java b/library/src/main/java/com/datatorrent/lib/util/TimeBucketKey.java
index f580d54..8476a94 100644
--- a/library/src/main/java/com/datatorrent/lib/util/TimeBucketKey.java
+++ b/library/src/main/java/com/datatorrent/lib/util/TimeBucketKey.java
@@ -53,10 +53,10 @@ public class TimeBucketKey
   private static DateFormat hourDateFormat = new SimpleDateFormat("'h|'yyyyMMddHH");
   private static DateFormat minuteDateFormat = new SimpleDateFormat("'m|'yyyyMMddHHmm");
 
-  private static final long MILLIS_IN_MIN = 60*1000;
-  private static final long MILLIS_IN_HOUR = 60*60*1000;
-  private static final long MILLIS_IN_DAY = 24*60*60*1000;
-  private static final long MILLIS_IN_WEEK = 7*24*60*60*1000;
+  private static final long MILLIS_IN_MIN = 60 * 1000;
+  private static final long MILLIS_IN_HOUR = 60 * 60 * 1000;
+  private static final long MILLIS_IN_DAY = 24 * 60 * 60 * 1000;
+  private static final long MILLIS_IN_WEEK = 7 * 24 * 60 * 60 * 1000;
 
   static {
     // TODO - Fix this
@@ -72,10 +72,12 @@ public class TimeBucketKey
   private Calendar time;
   private int timeSpec;
 
-  public TimeBucketKey() {
+  public TimeBucketKey()
+  {
   }
 
-  public TimeBucketKey(Calendar time, int timeSpec) {
+  public TimeBucketKey(Calendar time, int timeSpec)
+  {
     this.time = time;
     this.timeSpec = timeSpec;
   }
@@ -136,17 +138,13 @@ public class TimeBucketKey
       TimeBucketKey ckey = (TimeBucketKey)obj;
       if (timeSpec == TIMESPEC_MINUTE_SPEC) {
         equal = ((time.getTimeInMillis() / MILLIS_IN_MIN) == (ckey.getTime().getTimeInMillis() / MILLIS_IN_MIN));
-      }
-      else if (timeSpec == TIMESPEC_HOUR_SPEC) {
+      } else if (timeSpec == TIMESPEC_HOUR_SPEC) {
         equal = ((time.getTimeInMillis() / MILLIS_IN_HOUR) == (ckey.getTime().getTimeInMillis() / MILLIS_IN_HOUR));
-      }
-      else if (timeSpec == TIMESPEC_DAY_SPEC) {
+      } else if (timeSpec == TIMESPEC_DAY_SPEC) {
         equal = ((time.getTimeInMillis() / MILLIS_IN_DAY) == (ckey.getTime().getTimeInMillis() / MILLIS_IN_DAY));
-      }
-      else if (timeSpec == TIMESPEC_WEEK_SPEC) {
+      } else if (timeSpec == TIMESPEC_WEEK_SPEC) {
         equal = ((time.getTimeInMillis() / MILLIS_IN_WEEK) == (ckey.getTime().getTimeInMillis() / MILLIS_IN_WEEK));
-      }
-      else {
+      } else {
         boolean chkEqual = true;
         if ((timeSpec & TIMESPEC_YEAR) != 0) {
           if (time.get(Calendar.YEAR) != ckey.getTime().get(Calendar.YEAR)) {
@@ -170,15 +168,15 @@ public class TimeBucketKey
     Date date = time.getTime();
     if (timeSpec == TIMESPEC_YEAR_SPEC) {
       return yearDateFormat.format(date);
-    }else if (timeSpec == TIMESPEC_MONTH_SPEC) {
+    } else if (timeSpec == TIMESPEC_MONTH_SPEC) {
       return monthDateFormat.format(date);
-    }else if (timeSpec == TIMESPEC_WEEK_SPEC) {
+    } else if (timeSpec == TIMESPEC_WEEK_SPEC) {
       return weekDateFormat.format(date);
-    }else if (timeSpec == TIMESPEC_DAY_SPEC) {
+    } else if (timeSpec == TIMESPEC_DAY_SPEC) {
       return dayDateFormat.format(date);
-    }else if (timeSpec == TIMESPEC_HOUR_SPEC) {
+    } else if (timeSpec == TIMESPEC_HOUR_SPEC) {
       return hourDateFormat.format(date);
-    }else if (timeSpec == TIMESPEC_MINUTE_SPEC) {
+    } else if (timeSpec == TIMESPEC_MINUTE_SPEC) {
       return minuteDateFormat.format(date);
     }
     return null;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/TopNSort.java b/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
index 00f2c9c..ba9cb01 100644
--- a/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
+++ b/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
@@ -160,8 +160,7 @@ public class TopNSort<E>
 
     if (ascending) { // means head is the lowest value due to inversion
       insert = head.compareTo(e) <= 0; // e >= head
-    }
-    else { // means head is the highest value due to inversion
+    } else { // means head is the highest value due to inversion
       insert = head.compareTo(e) >= 0; // head is <= e
     }
     if (insert && q.offer(e)) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/TopNUniqueSort.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/TopNUniqueSort.java b/library/src/main/java/com/datatorrent/lib/util/TopNUniqueSort.java
index cbe46ad..1171ca3 100644
--- a/library/src/main/java/com/datatorrent/lib/util/TopNUniqueSort.java
+++ b/library/src/main/java/com/datatorrent/lib/util/TopNUniqueSort.java
@@ -142,7 +142,7 @@ public class TopNUniqueSort<E>
       depth = n;
     }
     for (int i = 0; i < depth; i++) {
-      E o = (E) list.get(size - i - 1);
+      E o = (E)list.get(size - i - 1);
       HashMap<E, Integer> val = new HashMap<E, Integer>(1);
       MutableInt ival = hmap.get(o);
       val.put(o, ival.toInteger());
@@ -176,12 +176,11 @@ public class TopNUniqueSort<E>
 
     boolean ret = false;
     boolean insert;
-    Comparable<? super E> head = (Comparable<? super E>) q.peek();
+    Comparable<? super E> head = (Comparable<? super E>)q.peek();
 
     if (ascending) { // means head is the lowest value due to inversion
       insert = head.compareTo(e) < 0; // e > head
-    }
-    else { // means head is the highest value due to inversion
+    } else { // means head is the highest value due to inversion
       insert = head.compareTo(e) > 0; // head is < e
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierArrayHashMapFrequent.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierArrayHashMapFrequent.java b/library/src/main/java/com/datatorrent/lib/util/UnifierArrayHashMapFrequent.java
index b84aed7..f02a590 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierArrayHashMapFrequent.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierArrayHashMapFrequent.java
@@ -18,14 +18,14 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier consumes key value pairs in the form of a list of hash maps,
  * where the key is an object and the value is an integer.&nbsp;
@@ -62,11 +62,9 @@ public class UnifierArrayHashMapFrequent<K> implements Unifier<ArrayList<HashMap
           lval = e.getValue();
           break;
         }
-      }
-      else {
+      } else {
         for (Map.Entry<K, Integer> e: tuple.entrySet()) {
-          if ((least && (e.getValue() < lval))
-                  || (!least && (e.getValue() > lval))) {
+          if ((least && (e.getValue() < lval)) || (!least && (e.getValue() > lval))) {
             mergedTuple.clear();
             mergedTuple.put(e.getKey(), e.getValue());
             break;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierArrayList.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierArrayList.java b/library/src/main/java/com/datatorrent/lib/util/UnifierArrayList.java
index ccaf52c..cb6c79b 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierArrayList.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierArrayList.java
@@ -20,8 +20,8 @@ package com.datatorrent.lib.util;
 
 import java.util.ArrayList;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanAnd.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanAnd.java b/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanAnd.java
index 4c6d822..13c26cc 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanAnd.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanAnd.java
@@ -18,8 +18,8 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanOr.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanOr.java b/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanOr.java
index ed13098..1b900e0 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanOr.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierBooleanOr.java
@@ -18,8 +18,8 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierCountOccurKey.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierCountOccurKey.java b/library/src/main/java/com/datatorrent/lib/util/UnifierCountOccurKey.java
index 37ea7b7..4a9efcf 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierCountOccurKey.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierCountOccurKey.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier counts the number of times it consumes an input tuple within each application window.&nbsp;
  * At the end of each window the tuples and their counts are emitted as a map.
@@ -38,9 +38,9 @@ import java.util.Map;
  */
 public class UnifierCountOccurKey<K> implements Unifier<KeyValPair<K, Integer>>
 {
-	/**
-	 * Key/Occurrence  map used for unifying key/occurrence values.
-	 */
+  /**
+   * Key/Occurrence  map used for unifying key/occurrence values.
+   */
   private HashMap<K, Integer> counts = new HashMap<K, Integer>();
 
   /**
@@ -56,12 +56,12 @@ public class UnifierCountOccurKey<K> implements Unifier<KeyValPair<K, Integer>>
   @Override
   public void process(KeyValPair<K, Integer> tuple)
   {
-  	if (counts.containsKey(tuple.getKey())) {
-  		Integer val = (Integer)counts.remove(tuple.getKey());
-  		counts.put(tuple.getKey(), val + tuple.getValue());
-  	} else {
-  		counts.put(tuple.getKey(), tuple.getValue());
-  	}
+    if (counts.containsKey(tuple.getKey())) {
+      Integer val = (Integer)counts.remove(tuple.getKey());
+      counts.put(tuple.getKey(), val + tuple.getValue());
+    } else {
+      counts.put(tuple.getKey(), tuple.getValue());
+    }
   }
 
   /**
@@ -82,7 +82,7 @@ public class UnifierCountOccurKey<K> implements Unifier<KeyValPair<K, Integer>>
   {
     if (!counts.isEmpty())  {
       for (Map.Entry<K, Integer> entry : counts.entrySet()) {
-      	outport.emit(new KeyValPair<K, Integer>(entry.getKey(), entry.getValue()));
+        outport.emit(new KeyValPair<K, Integer>(entry.getKey(), entry.getValue()));
       }
     }
     counts = new HashMap<K, Integer>();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierHashMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMap.java b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMap.java
index e79d333..389440a 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMap.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMap.java
@@ -18,12 +18,12 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
+import java.util.HashMap;
+
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
-import java.util.HashMap;
-
 /**
  * This unifier combines all the hash maps it receives within an application window,
  * and emits the combined hash map at the end of the application window.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapFrequent.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapFrequent.java b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapFrequent.java
index 1d5076f..046b7e0 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapFrequent.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapFrequent.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier consumes key value pairs in the form of a hash map, where the key is an object and the value is an integer.&nbsp;
  * The operator emits either the largest or smallest value associated with each key at the end of each application window.
@@ -60,11 +60,9 @@ public class UnifierHashMapFrequent<K> implements Unifier<HashMap<K, Integer>>
         lval = e.getValue();
         break;
       }
-    }
-    else {
+    } else {
       for (Map.Entry<K, Integer> e: tuple.entrySet()) {
-        if ((least && (e.getValue() < lval))
-                || (!least && (e.getValue() > lval))) {
+        if ((least && (e.getValue() < lval)) || (!least && (e.getValue() > lval))) {
           mergedTuple.clear();
           mergedTuple.put(e.getKey(), e.getValue());
           break;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapInteger.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapInteger.java b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapInteger.java
index 4894196..354f18a 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapInteger.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapInteger.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier consumes tuples which are maps from objects to integers.&nbsp;
  * The integers for each key are aggregated and a map from keys to sums is emitted at the end of each application window.
@@ -55,8 +55,7 @@ public class UnifierHashMapInteger<K> implements Unifier<HashMap<K, Integer>>
       Integer val = mergedTuple.get(e.getKey());
       if (val == null) {
         val = e.getValue();
-      }
-      else {
+      } else {
         val += e.getValue();
       }
       mergedTuple.put(e.getKey(), val);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapRange.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapRange.java b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapRange.java
index 8c10544..4ba9821 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapRange.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapRange.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier consumes hash maps, where the key is an object and the value is a number.&nbsp;
  * The unifier emits the minimum and maximum value for each key in a map at the end of each application window.
@@ -56,8 +56,7 @@ public class UnifierHashMapRange<K, V extends Number> implements Unifier<HashMap
       if (val == null) {
         val = new HighLow(e.getValue().getHigh(), e.getValue().getLow());
         mergedTuple.put(e.getKey(), val);
-      }
-      else {
+      } else {
         if (val.getHigh().doubleValue() < e.getValue().getHigh().doubleValue()) {
           val.setHigh(e.getValue().getHigh());
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapSumKeys.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapSumKeys.java b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapSumKeys.java
index 7fca414..253e3e2 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapSumKeys.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierHashMapSumKeys.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier consumes hash maps whose keys are objects and whose values are numbers.&nbsp;
  * The values for each key are summed and emitted in a hash map at the end of each application window.
@@ -51,8 +51,7 @@ public class UnifierHashMapSumKeys<K, V extends Number> extends BaseNumberKeyVal
       Double val = mergedTuple.get(e.getKey());
       if (val == null) {
         mergedTuple.put(e.getKey(), e.getValue().doubleValue());
-      }
-      else {
+      } else {
         val += e.getValue().doubleValue();
         mergedTuple.put(e.getKey(), val);
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierKeyValRange.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierKeyValRange.java b/library/src/main/java/com/datatorrent/lib/util/UnifierKeyValRange.java
index d12355e..5e08799 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierKeyValRange.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierKeyValRange.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier consumes key value pairs, where the key is an object and the value is a number.&nbsp;
  * The unifier emits the minimum and maximum value for each key in a map at the end of each application window.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierMap.java b/library/src/main/java/com/datatorrent/lib/util/UnifierMap.java
index f56455f..cc62fd3 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierMap.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierMap.java
@@ -18,13 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Operator.Unifier;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.Operator.Unifier;
+
 /**
  * This unifier combines all the maps it receives within an application window,
  * and emits the combined map at the end of the application window.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierRange.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierRange.java b/library/src/main/java/com/datatorrent/lib/util/UnifierRange.java
index d9e3455..103e056 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierRange.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierRange.java
@@ -18,8 +18,8 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 
@@ -51,8 +51,7 @@ public class UnifierRange<V extends Number> implements Unifier<HighLow<V>>
   {
     if (mergedTuple == null) {
       mergedTuple = new HighLow(tuple.getHigh(), tuple.getLow());
-    }
-    else {
+    } else {
       if (mergedTuple.getHigh().doubleValue() < tuple.getHigh().doubleValue()) {
         mergedTuple.setHigh(tuple.getHigh());
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/UnifierSumNumber.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/UnifierSumNumber.java b/library/src/main/java/com/datatorrent/lib/util/UnifierSumNumber.java
index 23bf98d..9aa817c 100644
--- a/library/src/main/java/com/datatorrent/lib/util/UnifierSumNumber.java
+++ b/library/src/main/java/com/datatorrent/lib/util/UnifierSumNumber.java
@@ -18,8 +18,8 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/xml/AbstractXmlDOMOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/xml/AbstractXmlDOMOperator.java b/library/src/main/java/com/datatorrent/lib/xml/AbstractXmlDOMOperator.java
index 2ab7875..d74def4 100644
--- a/library/src/main/java/com/datatorrent/lib/xml/AbstractXmlDOMOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/xml/AbstractXmlDOMOperator.java
@@ -18,14 +18,15 @@
  */
 package com.datatorrent.lib.xml;
 
-
-import javax.xml.parsers.*;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
 
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
 
-import com.datatorrent.api.*;
-
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.netlet.util.DTThrowable;
 
@@ -69,7 +70,8 @@ public abstract class AbstractXmlDOMOperator<T> extends BaseOperator
     }
   };
 
-  protected void processTuple(T tuple) {
+  protected void processTuple(T tuple)
+  {
     try {
       InputSource source = getInputSource(tuple);
       Document document = docBuilder.parse(source);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/org/apache/apex/malhar/lib/fs/BytesFileOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/fs/BytesFileOutputOperator.java b/library/src/main/java/org/apache/apex/malhar/lib/fs/BytesFileOutputOperator.java
index 82f038f..acdcfdb 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/fs/BytesFileOutputOperator.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/fs/BytesFileOutputOperator.java
@@ -36,8 +36,6 @@ import com.datatorrent.netlet.util.DTThrowable;
  * This class is responsible for writing tuples to HDFS. All tuples are written
  * to the same file. Rolling file based on size, no. of tuples, idle windows,
  * elapsed windows is supported.
- * 
- * @param <T>
  */
 
 class BytesFileOutputOperator extends AbstractSingleFileOutputOperator<byte[]>

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/org/apache/apex/malhar/lib/state/managed/ManagedTimeStateImpl.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/state/managed/ManagedTimeStateImpl.java b/library/src/main/java/org/apache/apex/malhar/lib/state/managed/ManagedTimeStateImpl.java
index 708cfeb..c2fcf6f 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/state/managed/ManagedTimeStateImpl.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/state/managed/ManagedTimeStateImpl.java
@@ -23,11 +23,12 @@ import java.util.concurrent.Future;
 import javax.validation.constraints.Min;
 import javax.validation.constraints.NotNull;
 
+import org.apache.apex.malhar.lib.state.BucketedState;
+import org.apache.apex.malhar.lib.state.TimeSlicedBucketedState;
+
 import com.google.common.util.concurrent.Futures;
 
 import com.datatorrent.api.annotation.OperatorAnnotation;
-import org.apache.apex.malhar.lib.state.BucketedState;
-import org.apache.apex.malhar.lib.state.TimeSlicedBucketedState;
 import com.datatorrent.netlet.util.Slice;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/org/apache/hadoop/io/file/tfile/CacheManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/hadoop/io/file/tfile/CacheManager.java b/library/src/main/java/org/apache/hadoop/io/file/tfile/CacheManager.java
index 2f47a76..5a27fc5 100644
--- a/library/src/main/java/org/apache/hadoop/io/file/tfile/CacheManager.java
+++ b/library/src/main/java/org/apache/hadoop/io/file/tfile/CacheManager.java
@@ -21,8 +21,9 @@ package org.apache.hadoop.io.file.tfile;
 import java.lang.management.ManagementFactory;
 import java.util.Collection;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.io.file.tfile.DTBCFile.Reader.BlockReader;
+
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.Weigher;
@@ -51,7 +52,8 @@ public class CacheManager
 
   private static boolean enableStats = false;
 
-  public static final Cache<String, BlockReader> buildCache(CacheBuilder builder) {
+  public static final Cache<String, BlockReader> buildCache(CacheBuilder builder)
+  {
     if (singleCache != null) {
       singleCache.cleanUp();
     }
@@ -70,11 +72,10 @@ public class CacheManager
    * @param maximunSize
    * @return The cache.
    */
-  public static final Cache<String, BlockReader> createCache(int concurrencyLevel,int initialCapacity, int maximunSize){
-    CacheBuilder builder = CacheBuilder.newBuilder().
-        concurrencyLevel(concurrencyLevel).
-        initialCapacity(initialCapacity).
-        maximumSize(maximunSize);
+  public static final Cache<String, BlockReader> createCache(int concurrencyLevel,int initialCapacity, int maximunSize)
+  {
+    CacheBuilder builder = CacheBuilder.newBuilder().concurrencyLevel(concurrencyLevel)
+        .initialCapacity(initialCapacity).maximumSize(maximunSize);
 
     return buildCache(builder);
   }
@@ -87,12 +88,11 @@ public class CacheManager
    * @param maximumMemory
    * @return The cache.
    */
-  public static final Cache<String, BlockReader> createCache(int concurrencyLevel,int initialCapacity, long maximumMemory){
+  public static final Cache<String, BlockReader> createCache(int concurrencyLevel,int initialCapacity, long maximumMemory)
+  {
 
-    CacheBuilder builder = CacheBuilder.newBuilder().
-        concurrencyLevel(concurrencyLevel).
-        initialCapacity(initialCapacity).
-        maximumWeight(maximumMemory).weigher(new KVWeigher());
+    CacheBuilder builder = CacheBuilder.newBuilder().concurrencyLevel(concurrencyLevel).initialCapacity(initialCapacity)
+        .maximumWeight(maximumMemory).weigher(new KVWeigher());
 
     return buildCache(builder);
   }
@@ -104,30 +104,36 @@ public class CacheManager
    * @param heapMemPercentage
    * @return The cache.
    */
-  public static final Cache<String, BlockReader> createCache(int concurrencyLevel,int initialCapacity, float heapMemPercentage){
-    CacheBuilder builder = CacheBuilder.newBuilder().
-        concurrencyLevel(concurrencyLevel).
-        initialCapacity(initialCapacity).
-        maximumWeight((long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * heapMemPercentage)).weigher(new KVWeigher());
+  public static final Cache<String, BlockReader> createCache(int concurrencyLevel, int initialCapacity,
+      float heapMemPercentage)
+  {
+    CacheBuilder builder = CacheBuilder.newBuilder()
+        .concurrencyLevel(concurrencyLevel).initialCapacity(initialCapacity)
+        .maximumWeight((long)(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * heapMemPercentage))
+        .weigher(new KVWeigher());
     return buildCache(builder);
   }
 
-  public static final void createDefaultCache(){
+  public static final void createDefaultCache()
+  {
 
-    long availableMemory = (long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * DEFAULT_HEAP_MEMORY_PERCENTAGE);
-    CacheBuilder<String, BlockReader> builder = CacheBuilder.newBuilder().maximumWeight(availableMemory).weigher(new KVWeigher());
+    long availableMemory = (long)(ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax() * DEFAULT_HEAP_MEMORY_PERCENTAGE);
+    CacheBuilder<String, BlockReader> builder = CacheBuilder.newBuilder().maximumWeight(availableMemory).weigher(
+        new KVWeigher());
 
     singleCache = buildCache(builder);
   }
 
-  public static final void put(String key, BlockReader blk){
+  public static final void put(String key, BlockReader blk)
+  {
     if (singleCache == null) {
       createDefaultCache();
     }
     singleCache.put(key, blk);
   }
 
-  public static final BlockReader get(String key){
+  public static final BlockReader get(String key)
+  {
     if (singleCache == null) {
       return null;
     }
@@ -136,17 +142,21 @@ public class CacheManager
 
   public static final void invalidateKeys(Collection<String> keys)
   {
-    if (singleCache != null)
+    if (singleCache != null) {
       singleCache.invalidateAll(keys);
+    }
   }
 
-  public static final long getCacheSize() {
-    if (singleCache != null)
+  public static final long getCacheSize()
+  {
+    if (singleCache != null) {
       return singleCache.size();
+    }
     return 0;
   }
 
-  public static final class KVWeigher implements Weigher<String, BlockReader> {
+  public static final class KVWeigher implements Weigher<String, BlockReader>
+  {
 
     @Override
     public int weigh(String key, BlockReader value)
@@ -159,11 +169,13 @@ public class CacheManager
   }
 
   @VisibleForTesting
-  protected static Cache<String, BlockReader> getCache() {
+  protected static Cache<String, BlockReader> getCache()
+  {
     return singleCache;
   }
 
-  public static final void setEnableStats(boolean enable) {
+  public static final void setEnableStats(boolean enable)
+  {
     enableStats = enable;
   }
 
@@ -172,13 +184,11 @@ public class CacheManager
 
     //code to eitsmate the overhead of the instance of the key value objects
     // it depends on hbase file
-//    System.out.println(ClassSize.estimateBase(BlockReader.class, true) +
 //        ClassSize.estimateBase(Algorithm.class, true) +
 //        ClassSize.estimateBase(RBlockState.class, true) +
 //        ClassSize.estimateBase(ReusableByteArrayInputStream.class, true) +
 //        ClassSize.estimateBase(BlockRegion.class, true));
 //
-//    System.out.println(
 //        ClassSize.estimateBase(String.class, true));
   }
 



[21/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
APEXMALHAR-2095 removed checkstyle violations of malhar library module


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/commit/3735316e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/tree/3735316e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/diff/3735316e

Branch: refs/heads/master
Commit: 3735316e8964fed605f2f9684cf11bb11bfb4817
Parents: 45c6825
Author: CI Support <je...@datatorrent.com>
Authored: Wed May 18 13:11:10 2016 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Wed May 18 13:13:47 2016 -0700

----------------------------------------------------------------------
 library/library-checkstyle-suppressions.xml     |   34 +
 library/pom.xml                                 |    7 +-
 .../lib/algo/AbstractStreamPatternMatcher.java  |    5 +-
 .../datatorrent/lib/algo/BottomNUnifier.java    |    6 +-
 .../com/datatorrent/lib/algo/FilterKeyVals.java |    9 +-
 .../datatorrent/lib/algo/FilterKeysHashMap.java |    4 +-
 .../com/datatorrent/lib/algo/FilterKeysMap.java |   11 +-
 .../com/datatorrent/lib/algo/FilterValues.java  |    2 +-
 .../java/com/datatorrent/lib/algo/FirstN.java   |    3 +-
 .../com/datatorrent/lib/algo/InsertSort.java    |    3 +-
 .../datatorrent/lib/algo/InsertSortDesc.java    |    9 +-
 .../lib/algo/LeastFrequentKeyArrayUnifier.java  |    2 +-
 .../lib/algo/LeastFrequentKeyMap.java           |    6 +-
 .../lib/algo/LeastFrequentKeyUnifier.java       |    2 +-
 .../lib/algo/LeastFrequentValue.java            |    4 +-
 .../java/com/datatorrent/lib/algo/MatchMap.java |    5 +-
 .../com/datatorrent/lib/algo/MergeSort.java     |  224 ++--
 .../datatorrent/lib/algo/MergeSortNumber.java   |   94 +-
 .../lib/algo/MostFrequentKeyMap.java            |    4 +-
 .../datatorrent/lib/algo/MostFrequentValue.java |    2 +-
 .../com/datatorrent/lib/algo/UniqueCounter.java |    9 +-
 .../com/datatorrent/lib/appdata/QueueUtils.java |   11 +-
 .../com/datatorrent/lib/appdata/StoreUtils.java |   35 +-
 .../datastructs/CacheLRUSynchronousFlush.java   |   27 +-
 .../appdata/datastructs/DimensionalTable.java   |   69 +-
 .../lib/appdata/gpo/GPOByteArrayList.java       |   10 +-
 .../datatorrent/lib/appdata/gpo/GPOGetters.java |    4 +-
 .../datatorrent/lib/appdata/gpo/GPOMutable.java |   96 +-
 .../datatorrent/lib/appdata/gpo/GPOType.java    |  244 ++--
 .../datatorrent/lib/appdata/gpo/GPOUtils.java   | 1176 +++++++-----------
 .../com/datatorrent/lib/appdata/gpo/Serde.java  |    5 +-
 .../lib/appdata/gpo/SerdeFieldsDescriptor.java  |   10 +-
 .../lib/appdata/gpo/SerdeListGPOMutable.java    |   19 +-
 .../lib/appdata/gpo/SerdeListPrimitive.java     |   14 +-
 .../lib/appdata/gpo/SerdeListString.java        |   12 +-
 .../query/AbstractWindowEndQueueManager.java    |   89 +-
 .../query/AppDataWindowEndQueueManager.java     |   15 +-
 .../lib/appdata/query/QueryBundle.java          |    4 +-
 .../lib/appdata/query/QueryExecutor.java        |    4 +-
 .../appdata/query/QueryManagerAsynchronous.java |   37 +-
 .../appdata/query/QueryManagerSynchronous.java  |   23 +-
 .../lib/appdata/query/QueueList.java            |   25 +-
 .../lib/appdata/query/QueueManager.java         |    5 +
 .../appdata/query/SimpleDoneQueueManager.java   |    4 +-
 .../lib/appdata/query/SimpleQueueManager.java   |   16 +-
 .../lib/appdata/query/WindowBoundedService.java |   11 +-
 .../appdata/query/WindowEndQueueManager.java    |    9 +-
 .../query/serde/CustomMessageDeserializer.java  |    4 +-
 .../serde/DataQuerySnapshotDeserializer.java    |   81 +-
 .../query/serde/DataQuerySnapshotValidator.java |   11 +-
 .../serde/DataResultSnapshotSerializer.java     |   14 +-
 .../query/serde/MessageDeserializerFactory.java |   84 +-
 .../query/serde/MessageSerializerFactory.java   |   52 +-
 .../query/serde/SchemaQueryDeserializer.java    |   21 +-
 .../query/serde/SimpleDataDeserializer.java     |    3 +-
 .../query/serde/SimpleDataSerializer.java       |    3 +-
 .../lib/appdata/schemas/CustomTimeBucket.java   |   18 +-
 .../lib/appdata/schemas/DataQuerySnapshot.java  |   40 +-
 .../lib/appdata/schemas/DataResultSnapshot.java |   14 +-
 .../schemas/DimensionalConfigurationSchema.java |   11 +-
 .../lib/appdata/schemas/DimensionalSchema.java  |    5 +-
 .../datatorrent/lib/appdata/schemas/Fields.java |   17 +-
 .../lib/appdata/schemas/FieldsDescriptor.java   |   84 +-
 .../datatorrent/lib/appdata/schemas/QRBase.java |    7 +-
 .../datatorrent/lib/appdata/schemas/Query.java  |   21 +-
 .../datatorrent/lib/appdata/schemas/Result.java |    3 +-
 .../lib/appdata/schemas/ResultFormatter.java    |   58 +-
 .../datatorrent/lib/appdata/schemas/Schema.java |    5 +
 .../lib/appdata/schemas/SchemaQuery.java        |   13 +-
 .../lib/appdata/schemas/SchemaRegistry.java     |    3 +
 .../appdata/schemas/SchemaRegistryMultiple.java |    7 +-
 .../appdata/schemas/SchemaRegistrySingle.java   |   18 +-
 .../lib/appdata/schemas/SchemaResult.java       |   10 +-
 .../appdata/schemas/SchemaResultSerializer.java |   23 +-
 .../lib/appdata/schemas/SchemaUtils.java        |  144 +--
 .../lib/appdata/schemas/SnapshotSchema.java     |   61 +-
 .../lib/appdata/schemas/TimeBucket.java         |   11 +-
 .../datatorrent/lib/appdata/schemas/Type.java   |   52 +-
 .../snapshot/AbstractAppDataSnapshotServer.java |   16 +-
 .../snapshot/AppDataSnapshotServerMap.java      |   15 +-
 .../snapshot/AppDataSnapshotServerPOJO.java     |    2 +-
 .../lib/codec/JavaSerializationStreamCodec.java |   62 +-
 .../ByteArrayToStringConverterOperator.java     |    7 +-
 .../datatorrent/lib/converter/Converter.java    |    3 +-
 .../MapToKeyHashValuePairConverter.java         |    8 +-
 .../converter/MapToKeyValuePairConverter.java   |   11 +-
 .../StringValueToNumberConverterForMap.java     |   14 +-
 .../datatorrent/lib/counters/BasicCounters.java |   11 +-
 ...nsactionableKeyValueStoreOutputOperator.java |    2 +-
 ...BatchTransactionableStoreOutputOperator.java |   10 +-
 .../db/AbstractKeyValueStoreInputOperator.java  |    6 +-
 ...nsactionableKeyValueStoreOutputOperator.java |    2 +-
 ...sThruTransactionableStoreOutputOperator.java |    3 +-
 .../lib/db/AbstractStoreInputOperator.java      |    8 +-
 .../lib/db/AbstractStoreOutputOperator.java     |    8 +-
 ...tractTransactionableStoreOutputOperator.java |    8 +-
 ...bcNonTransactionableBatchOutputOperator.java |   20 +-
 .../db/jdbc/JDBCDimensionalOutputOperator.java  |    8 +-
 .../lib/db/jdbc/JdbcNonTransactionalStore.java  |   11 +-
 .../lib/fileaccess/DTFileReader.java            |    5 +-
 .../lib/fileaccess/FileAccessFSImpl.java        |    6 +-
 .../datatorrent/lib/fileaccess/TFileImpl.java   |   13 +-
 .../datatorrent/lib/fileaccess/TFileReader.java |    9 +-
 .../datatorrent/lib/fileaccess/TFileWriter.java |    8 +-
 .../datatorrent/lib/formatter/Formatter.java    |    2 +-
 .../lib/io/AbstractHttpGetOperator.java         |    3 +-
 .../lib/io/AbstractHttpInputOperator.java       |   11 +-
 .../lib/io/AbstractHttpOperator.java            |    6 +-
 .../io/AbstractKeyValueStoreOutputOperator.java |   12 +-
 .../lib/io/AbstractSocketInputOperator.java     |   16 +-
 .../datatorrent/lib/io/ApacheGenRandomLogs.java |  315 ++---
 .../CollectionMultiConsoleOutputOperator.java   |    8 +-
 .../lib/io/ConsoleOutputOperator.java           |   11 +-
 .../lib/io/HttpJsonChunksInputOperator.java     |   13 +-
 .../lib/io/HttpLinesInputOperator.java          |    6 +-
 .../lib/io/HttpPostOutputOperator.java          |    7 +-
 .../lib/io/IdempotentStorageManager.java        |   15 +-
 .../lib/io/MapMultiConsoleOutputOperator.java   |    8 +-
 .../lib/io/PubSubWebSocketAppDataQuery.java     |   17 +-
 .../lib/io/PubSubWebSocketAppDataResult.java    |   17 +-
 .../lib/io/PubSubWebSocketInputOperator.java    |    3 +-
 .../lib/io/SimpleSinglePortInputOperator.java   |   20 +-
 .../datatorrent/lib/io/SmtpOutputOperator.java  |   30 +-
 .../lib/io/WebSocketInputOperator.java          |   25 +-
 .../lib/io/WebSocketOutputOperator.java         |    6 +-
 .../lib/io/WebSocketServerInputOperator.java    |   14 +-
 .../lib/io/WidgetOutputOperator.java            |   28 +-
 .../lib/io/fs/AbstractFileInputOperator.java    |  182 ++-
 .../lib/io/fs/AbstractReconciler.java           |   17 +-
 .../fs/AbstractThroughputFileInputOperator.java |   34 +-
 .../com/datatorrent/lib/io/fs/FileSplitter.java |  101 +-
 .../lib/io/fs/FilterStreamContext.java          |    2 +-
 .../lib/io/fs/FilterStreamProvider.java         |   13 +-
 .../lib/io/fs/TailFsInputOperator.java          |    6 +-
 .../com/datatorrent/lib/io/fs/package-info.java |    2 +-
 .../lib/io/jms/AbstractJMSOutputOperator.java   |   38 +-
 .../AbstractJMSSinglePortOutputOperator.java    |    3 +-
 .../io/jms/FSPsuedoTransactionableStore.java    |   69 +-
 .../com/datatorrent/lib/io/jms/JMSBase.java     |   18 +-
 .../lib/io/jms/JMSMultiPortOutputOperator.java  |   34 +-
 .../lib/io/jms/JMSObjectInputOperator.java      |   39 +-
 .../lib/io/jms/JMSTransactionableStore.java     |   56 +-
 .../datatorrent/lib/io/jms/package-info.java    |    2 +-
 .../logs/ApacheLogParseMapOutputOperator.java   |   13 +-
 .../lib/logs/ApacheLogParseOperator.java        |  193 ++-
 .../lib/logs/ApacheVirtualLogParseOperator.java |  296 ++---
 .../lib/logs/DimensionAggregationUnifier.java   |    6 +-
 .../datatorrent/lib/logs/DimensionObject.java   |   11 +-
 .../lib/logs/FilteredLineToTokenArrayList.java  |    5 +-
 .../lib/logs/FilteredLineToTokenHashMap.java    |   57 +-
 .../lib/logs/FilteredLineTokenizerKeyVal.java   |   57 +-
 .../lib/logs/LineToTokenArrayList.java          |   14 +-
 .../lib/logs/LineToTokenHashMap.java            |    5 +-
 .../com/datatorrent/lib/logs/LineTokenizer.java |   31 +-
 .../lib/logs/LineTokenizerKeyVal.java           |   14 +-
 .../logs/MultiWindowDimensionAggregation.java   |   30 +-
 .../lib/logs/RegexMatchMapOperator.java         |   15 +-
 .../lib/math/AbstractAggregateCalc.java         |   88 +-
 .../datatorrent/lib/math/AbstractOutput.java    |   42 +-
 .../lib/math/AbstractXmlCartesianProduct.java   |   42 +-
 .../AbstractXmlKeyValueCartesianProduct.java    |    3 +-
 .../java/com/datatorrent/lib/math/Average.java  |  138 +-
 .../com/datatorrent/lib/math/AverageKeyVal.java |  167 +--
 .../java/com/datatorrent/lib/math/Change.java   |  102 +-
 .../com/datatorrent/lib/math/ChangeAlert.java   |  114 +-
 .../datatorrent/lib/math/ChangeAlertKeyVal.java |  136 +-
 .../datatorrent/lib/math/ChangeAlertMap.java    |    2 +-
 .../com/datatorrent/lib/math/ChangeKeyVal.java  |    6 +-
 .../datatorrent/lib/math/CompareExceptMap.java  |    9 +-
 .../com/datatorrent/lib/math/CompareMap.java    |   11 +-
 .../com/datatorrent/lib/math/CountKeyVal.java   |  114 +-
 .../java/com/datatorrent/lib/math/Division.java |   12 +-
 .../com/datatorrent/lib/math/ExceptMap.java     |   58 +-
 .../datatorrent/lib/math/LogicalCompare.java    |  102 +-
 .../lib/math/LogicalCompareToConstant.java      |  130 +-
 .../java/com/datatorrent/lib/math/Margin.java   |  160 +--
 .../com/datatorrent/lib/math/MarginKeyVal.java  |  254 ++--
 .../com/datatorrent/lib/math/MarginMap.java     |   16 +-
 .../main/java/com/datatorrent/lib/math/Max.java |    5 +-
 .../com/datatorrent/lib/math/MaxKeyVal.java     |   10 +-
 .../main/java/com/datatorrent/lib/math/Min.java |   15 +-
 .../com/datatorrent/lib/math/MinKeyVal.java     |   16 +-
 .../lib/math/MultiplyByConstant.java            |    8 +-
 .../java/com/datatorrent/lib/math/Quotient.java |  108 +-
 .../com/datatorrent/lib/math/QuotientMap.java   |  342 ++---
 .../java/com/datatorrent/lib/math/Range.java    |  106 +-
 .../com/datatorrent/lib/math/RangeKeyVal.java   |  146 ++-
 .../datatorrent/lib/math/RunningAverage.java    |  114 +-
 .../java/com/datatorrent/lib/math/Sigma.java    |   40 +-
 .../main/java/com/datatorrent/lib/math/Sum.java |  366 +++---
 .../com/datatorrent/lib/math/SumCountMap.java   |  444 +++----
 .../com/datatorrent/lib/math/SumKeyVal.java     |   13 +-
 .../math/XmlKeyValueStringCartesianProduct.java |    7 +-
 .../lib/multiwindow/AbstractSlidingWindow.java  |  210 ++--
 .../AbstractSlidingWindowKeyVal.java            |  184 +--
 .../lib/multiwindow/MultiWindowRangeKeyVal.java |    2 +-
 .../lib/multiwindow/MultiWindowSumKeyVal.java   |   75 +-
 .../lib/multiwindow/SimpleMovingAverage.java    |  150 +--
 .../lib/multiwindow/SortedMovingWindow.java     |   10 +-
 .../StatsAwareStatelessPartitioner.java         |   37 +-
 .../lib/script/JavaScriptOperator.java          |   43 +-
 .../datatorrent/lib/script/ScriptOperator.java  |   13 +-
 .../lib/statistics/MedianOperator.java          |   12 +-
 .../lib/statistics/ModeOperator.java            |    4 +-
 .../lib/statistics/StandardDeviation.java       |   12 +-
 .../lib/statistics/WeightedMeanOperator.java    |   14 +-
 .../lib/stream/AbstractAggregator.java          |  166 +--
 .../datatorrent/lib/stream/ArrayListToItem.java |   43 +-
 .../lib/stream/ConsolidatorKeyVal.java          |  266 ++--
 .../com/datatorrent/lib/stream/Counter.java     |   92 +-
 .../com/datatorrent/lib/stream/DevNull.java     |   24 +-
 .../datatorrent/lib/stream/DevNullCounter.java  |  264 ++--
 .../lib/stream/HashMapToKeyValPair.java         |   85 +-
 .../lib/stream/JsonByteArrayOperator.java       |   10 +-
 .../lib/stream/KeyValPairToHashMap.java         |   43 +-
 .../lib/stream/RoundRobinHashMap.java           |   97 +-
 .../lib/stream/StreamDuplicater.java            |    6 +-
 .../datatorrent/lib/stream/StreamMerger.java    |   10 +-
 .../streamquery/AbstractSqlStreamOperator.java  |   11 +-
 .../lib/streamquery/DeleteOperator.java         |    2 +-
 .../lib/streamquery/DerbySqlStreamOperator.java |   93 +-
 .../lib/streamquery/GroupByHavingOperator.java  |   23 +-
 .../lib/streamquery/InnerJoinOperator.java      |   47 +-
 .../lib/streamquery/OrderByOperator.java        |  100 +-
 .../lib/streamquery/OrderByRule.java            |    2 +-
 .../lib/streamquery/OuterJoinOperator.java      |    4 +-
 .../lib/streamquery/SelectFunctionOperator.java |    4 +-
 .../lib/streamquery/SelectOperator.java         |    5 +-
 .../lib/streamquery/SelectTopOperator.java      |   17 +-
 .../lib/streamquery/UpdateOperator.java         |   33 +-
 .../streamquery/condition/BetweenCondition.java |   18 +-
 .../condition/CompoundCondition.java            |   19 +-
 .../lib/streamquery/condition/Condition.java    |   13 +-
 .../condition/EqualValueCondition.java          |   15 +-
 .../condition/HavingCompareValue.java           |    4 +-
 .../streamquery/condition/HavingCondition.java  |    7 +-
 .../lib/streamquery/condition/InCondition.java  |   12 +-
 .../condition/JoinColumnEqualCondition.java     |    9 +-
 .../streamquery/condition/LikeCondition.java    |   14 +-
 .../streamquery/function/AverageFunction.java   |   10 +-
 .../lib/streamquery/function/CountFunction.java |   12 +-
 .../streamquery/function/FirstLastFunction.java |   24 +-
 .../lib/streamquery/function/FunctionIndex.java |   18 +-
 .../streamquery/function/MaxMinFunction.java    |   14 +-
 .../lib/streamquery/function/SumFunction.java   |    8 +-
 .../lib/streamquery/index/BinaryExpression.java |    8 +-
 .../lib/streamquery/index/ColumnIndex.java      |    4 +-
 .../lib/streamquery/index/Index.java            |    2 +-
 .../lib/streamquery/index/MidIndex.java         |   16 +-
 .../lib/streamquery/index/NegateExpression.java |    8 +-
 .../lib/streamquery/index/RoundDoubleIndex.java |   16 +-
 .../lib/streamquery/index/StringCaseIndex.java  |   10 +-
 .../lib/streamquery/index/StringLenIndex.java   |   10 +-
 .../lib/streamquery/index/SumExpression.java    |   14 +-
 .../lib/streamquery/index/UnaryExpression.java  |    6 +-
 .../lib/testbench/ArrayListTestSink.java        |    7 +-
 .../lib/testbench/CollectorTestSink.java        |   14 +-
 .../lib/testbench/CompareFilterTuples.java      |  117 +-
 .../testbench/CountAndLastTupleTestSink.java    |    4 +-
 .../lib/testbench/CountOccurance.java           |  116 +-
 .../lib/testbench/CountTestSink.java            |    3 +-
 .../lib/testbench/EventClassifier.java          |   92 +-
 .../EventClassifierNumberToHashDouble.java      |   17 +-
 .../lib/testbench/EventGenerator.java           |   27 +-
 .../lib/testbench/EventIncrementer.java         |   15 +-
 .../lib/testbench/FilterClassifier.java         |   13 +-
 .../lib/testbench/FilteredEventClassifier.java  |   13 +-
 .../datatorrent/lib/testbench/HashTestSink.java |   22 +-
 .../lib/testbench/HttpStatusFilter.java         |  105 +-
 .../datatorrent/lib/testbench/KeyValSum.java    |   72 +-
 .../lib/testbench/RandomEventGenerator.java     |   15 +-
 .../lib/testbench/RandomWordGenerator.java      |   11 +-
 .../datatorrent/lib/testbench/RedisSumOper.java |   70 +-
 .../lib/testbench/SeedEventClassifier.java      |   12 +-
 .../lib/testbench/SeedEventGenerator.java       |   13 +-
 .../datatorrent/lib/testbench/SumTestSink.java  |    2 +-
 .../lib/testbench/ThroughputCounter.java        |   19 +-
 .../lib/testbench/TopOccurrence.java            |  162 ++-
 .../lib/transform/TransformOperator.java        |    3 +-
 .../lib/util/AbstractBaseFrequentKey.java       |   15 +-
 .../util/AbstractBaseFrequentKeyValueMap.java   |   11 +-
 .../lib/util/AbstractBaseMatchOperator.java     |   25 +-
 .../util/AbstractBaseNNonUniqueOperatorMap.java |    7 +-
 .../lib/util/AbstractBaseNOperatorMap.java      |    4 +-
 .../util/AbstractBaseNUniqueOperatorMap.java    |    7 +-
 .../lib/util/AbstractBaseSortOperator.java      |   11 +-
 .../AbstractDimensionTimeBucketOperator.java    |   33 +-
 .../lib/util/AbstractKeyValueStorageAgent.java  |    2 +-
 .../lib/util/ActiveMQMessageListener.java       |   23 +-
 .../util/ActiveMQMultiTypeMessageListener.java  |   42 +-
 .../lib/util/ArrayHashMapFrequent.java          |   14 +-
 .../lib/util/BaseFilteredKeyValueOperator.java  |    2 +-
 .../lib/util/BaseKeyValueOperator.java          |    3 +-
 .../datatorrent/lib/util/BaseLineTokenizer.java |   13 +-
 .../datatorrent/lib/util/BaseMatchOperator.java |    5 +-
 .../lib/util/BaseNumberKeyValueOperator.java    |   32 +-
 .../lib/util/BaseNumberValueOperator.java       |   18 +-
 .../util/DimensionTimeBucketSumOperator.java    |    7 +-
 .../com/datatorrent/lib/util/FieldInfo.java     |   11 +-
 .../datatorrent/lib/util/FilterOperator.java    |    2 +-
 .../lib/util/JavaScriptFilterOperator.java      |   29 +-
 .../datatorrent/lib/util/KeyHashValPair.java    |    2 +-
 .../com/datatorrent/lib/util/PojoUtils.java     |  142 ++-
 .../lib/util/ReusableStringReader.java          |    6 +-
 .../lib/util/ReversibleComparator.java          |    1 +
 .../com/datatorrent/lib/util/TableInfo.java     |    2 +-
 .../com/datatorrent/lib/util/TimeBucketKey.java |   36 +-
 .../java/com/datatorrent/lib/util/TopNSort.java |    3 +-
 .../datatorrent/lib/util/TopNUniqueSort.java    |    7 +-
 .../lib/util/UnifierArrayHashMapFrequent.java   |   14 +-
 .../datatorrent/lib/util/UnifierArrayList.java  |    2 +-
 .../datatorrent/lib/util/UnifierBooleanAnd.java |    2 +-
 .../datatorrent/lib/util/UnifierBooleanOr.java  |    2 +-
 .../lib/util/UnifierCountOccurKey.java          |   28 +-
 .../datatorrent/lib/util/UnifierHashMap.java    |    6 +-
 .../lib/util/UnifierHashMapFrequent.java        |   14 +-
 .../lib/util/UnifierHashMapInteger.java         |   11 +-
 .../lib/util/UnifierHashMapRange.java           |   11 +-
 .../lib/util/UnifierHashMapSumKeys.java         |   11 +-
 .../lib/util/UnifierKeyValRange.java            |    8 +-
 .../com/datatorrent/lib/util/UnifierMap.java    |    8 +-
 .../com/datatorrent/lib/util/UnifierRange.java  |    5 +-
 .../datatorrent/lib/util/UnifierSumNumber.java  |    2 +-
 .../lib/xml/AbstractXmlDOMOperator.java         |   12 +-
 .../malhar/lib/fs/BytesFileOutputOperator.java  |    2 -
 .../lib/state/managed/ManagedTimeStateImpl.java |    5 +-
 .../hadoop/io/file/tfile/CacheManager.java      |   70 +-
 .../tfile/ReusableByteArrayInputStream.java     |    7 +-
 .../algo/AbstractStreamPatternMatcherTest.java  |    2 +-
 .../lib/algo/AllAfterMatchMapTest.java          |   12 +-
 .../datatorrent/lib/algo/BottomNMapTest.java    |   10 +-
 .../lib/algo/BottomNUnifierTest.java            |   10 +-
 .../lib/algo/BottomNUniqueMapTest.java          |   11 +-
 .../datatorrent/lib/algo/DistinctMapTest.java   |    6 +-
 .../datatorrent/lib/algo/FilterKeysMapTest.java |    2 +-
 .../datatorrent/lib/algo/FilterValuesTest.java  |    4 +-
 .../com/datatorrent/lib/algo/FirstNTest.java    |   10 +-
 .../lib/algo/InsertSortDescTest.java            |    8 +-
 .../datatorrent/lib/algo/InsertSortTest.java    |    2 +-
 .../lib/algo/InvertIndexArrayTest.java          |   10 +-
 .../datatorrent/lib/algo/InvertIndexTest.java   |   10 +-
 .../lib/algo/LeastFrequentKeyMapTest.java       |    5 +-
 .../lib/algo/LeastFrequentKeyTest.java          |   15 +-
 .../lib/algo/LeastFrequentKeyValueMapTest.java  |    8 +-
 .../datatorrent/lib/algo/MatchAllMapTest.java   |    4 +-
 .../datatorrent/lib/algo/MatchAnyMapTest.java   |    2 +-
 .../com/datatorrent/lib/algo/MatchMapTest.java  |    8 +-
 .../lib/algo/MergeSortNumberTest.java           |   60 +-
 .../lib/algo/MostFrequentKeyMapTest.java        |    5 +-
 .../lib/algo/MostFrequentKeyTest.java           |   15 +-
 .../lib/algo/MostFrequentKeyValueMapTest.java   |    8 +-
 .../java/com/datatorrent/lib/algo/TopNTest.java |   10 +-
 .../datatorrent/lib/algo/TopNUniqueTest.java    |    8 +-
 .../datatorrent/lib/algo/UniqueCounterTest.java |   10 +-
 .../lib/algo/UniqueValueCountTest.java          |   85 +-
 .../CacheLRUSynchronousFlushTest.java           |   11 +-
 .../datastructs/DimensionalTableTest.java       |   18 +-
 .../lib/appdata/gpo/GPOMutableTest.java         |    4 +-
 .../lib/appdata/gpo/GPOUtilsTest.java           |   48 +-
 .../appdata/gpo/SerdeFieldsDescriptorTest.java  |    7 +-
 .../appdata/gpo/SerdeListGPOMutableTest.java    |    9 +-
 .../lib/appdata/gpo/SerdeListPrimitiveTest.java |   17 +-
 .../lib/appdata/gpo/SerdeListStringTest.java    |    6 +-
 .../lib/appdata/query/MockQuery.java            |   14 +-
 .../lib/appdata/query/MockResult.java           |   10 +-
 .../lib/appdata/query/MockResultSerializer.java |    5 +-
 .../query/QueryManagerAsynchronousTest.java     |   58 +-
 .../query/QueryManagerSynchronousTest.java      |   17 +-
 .../lib/appdata/query/QueueUtilsTest.java       |    4 +-
 .../query/SimpleDoneQueryQueueManagerTest.java  |   10 +-
 .../appdata/query/WEQueryQueueManagerTest.java  |   54 +-
 .../appdata/query/WindowBoundedServiceTest.java |    6 +-
 .../serde/MessageDeserializerFactoryTest.java   |    6 +-
 .../DataQuerySnapshotDeserializerTest.java      |   15 +-
 .../appdata/schemas/DataQuerySnapshotTest.java  |   10 +-
 .../DataResultSnapshotSerializerTest.java       |   25 +-
 .../DimensionalConfigurationSchemaTest.java     |  190 ++-
 .../appdata/schemas/DimensionalSchemaTest.java  |  184 ++-
 .../appdata/schemas/FieldsDescriptorTest.java   |   30 +-
 .../appdata/schemas/ResultFormatterTest.java    |   16 +-
 .../lib/appdata/schemas/SchemaQueryTest.java    |   42 +-
 .../schemas/SchemaRegistryMultipleTest.java     |   12 +-
 .../lib/appdata/schemas/SchemaTestUtils.java    |    8 +-
 .../lib/appdata/schemas/SchemaUtilsTest.java    |    8 +-
 .../lib/appdata/schemas/SnapshotSchemaTest.java |   14 +-
 .../snapshot/AppDataSnapshotServerMapTest.java  |   34 +-
 .../snapshot/AppDataSnapshotServerPojoTest.java |    6 +-
 .../codec/JavaSerializationStreamCodecTest.java |  196 +--
 .../ByteArrayToStringConverterTest.java         |    7 +-
 .../MapToKeyValuePairConverterTest.java         |   11 +-
 .../StringValueToNumberConverterForMapTest.java |   30 +-
 .../lib/counters/BasicCountersTest.java         |    7 +-
 .../lib/db/KeyValueStoreOperatorTest.java       |   21 +-
 ...ransactionableKeyValueStoreOperatorTest.java |    5 +-
 ...NonTransactionalBatchOutputOperatorTest.java |  299 ++---
 .../JdbcNonTransactionalOutputOperatorTest.java |   16 +-
 .../db/jdbc/JdbcNonTransactionalStoreTest.java  |   12 +-
 .../lib/formatter/JsonFormatterTest.java        |   12 +-
 .../lib/formatter/XmlFormatterTest.java         |   10 +-
 .../lib/helper/OperatorContextTestHelper.java   |    2 +-
 .../helper/SamplePubSubWebSocketServlet.java    |   12 +-
 .../lib/io/ApacheRandomLogsTest.java            |   63 +-
 .../lib/io/HttpJsonChunksInputOperatorTest.java |    8 +-
 .../lib/io/HttpLinesInputOperatorTest.java      |    4 +-
 .../io/HttpMultiValuedMapGetOperatorTest.java   |    6 +-
 .../lib/io/HttpPostOutputOperatorTest.java      |    6 +-
 .../lib/io/IdempotentStorageManagerTest.java    |   11 +-
 .../io/PubSubWebSocketAppDataOperatorTest.java  |    3 +-
 .../lib/io/PubSubWebSocketAppDataQueryTest.java |    4 +-
 .../lib/io/PubSubWebSocketOperatorTest.java     |    6 +-
 .../lib/io/SmtpOutputOperatorTest.java          |   17 +-
 .../lib/io/SocketInputOperatorTest.java         |   17 +-
 .../io/WebSocketServerInputOperatorTest.java    |   10 +-
 ...actFileInputOperatorFailureHandlingTest.java |   50 +-
 .../io/fs/AbstractFileInputOperatorTest.java    |   94 +-
 .../io/fs/AbstractFileOutputOperatorTest.java   |  675 +++-------
 .../lib/io/fs/AbstractReconcilerTest.java       |    6 +-
 .../AbstractSingleFileOutputOperatorTest.java   |   57 +-
 .../AbstractWindowFileOutputOperatorTest.java   |   46 +-
 .../lib/io/fs/FSInputModuleAppTest.java         |    2 +-
 .../lib/io/fs/FileSplitterInputTest.java        |    9 +-
 .../lib/io/fs/TailFsInputOperatorTest.java      |    6 +-
 .../io/jms/JMSMultiPortOutputOperatorTest.java  |   21 +-
 .../lib/io/jms/JMSObjectInputOperatorTest.java  |   39 +-
 .../lib/io/jms/JMSOutputOperatorTest.java       |  345 ++---
 .../com/datatorrent/lib/io/jms/JMSTestBase.java |    7 +-
 .../io/jms/JMSTransactionableStoreTestBase.java |   51 +-
 .../lib/join/MapTimeBasedJoinOperator.java      |    4 +-
 .../lib/join/POJOTimeBasedJoinOperatorTest.java |   12 +-
 .../lib/logs/ApacheLogParseOperatorTest.java    |  100 +-
 .../logs/FilteredLineToTokenArrayListTest.java  |    9 +-
 .../logs/FilteredLineToTokenHashMapTest.java    |  102 +-
 .../logs/FilteredLineTokenizerKeyValTest.java   |   11 +-
 .../lib/logs/LineToTokenArrayListTest.java      |  126 +-
 .../lib/logs/LineToTokenHashMapTest.java        |  106 +-
 .../lib/logs/LineTokenizerKeyValTest.java       |   84 +-
 .../MultiWindowDimensionAggregationTest.java    |    7 +-
 .../lib/logs/RegexMatchMapOperatorTest.java     |   25 +-
 .../lib/logs/TopNUniqueSiteStatsTest.java       |   94 +-
 .../datatorrent/lib/math/AverageKeyValTest.java |   93 +-
 .../com/datatorrent/lib/math/AverageTest.java   |  120 +-
 .../lib/math/ChangeAlertKeyValTest.java         |  116 +-
 .../lib/math/ChangeAlertMapTest.java            |  134 +-
 .../datatorrent/lib/math/ChangeKeyValTest.java  |  128 +-
 .../com/datatorrent/lib/math/ChangeTest.java    |   82 +-
 .../lib/math/CompareExceptMapTest.java          |   28 +-
 .../datatorrent/lib/math/CompareMapTest.java    |    8 +-
 .../datatorrent/lib/math/CountKeyValTest.java   |   14 +-
 .../lib/math/CountOccuranceTest.java            |   14 +-
 .../com/datatorrent/lib/math/DivisionTest.java  |  123 +-
 .../com/datatorrent/lib/math/ExceptMapTest.java |   16 +-
 .../lib/math/LogicalCompareTest.java            |  120 +-
 .../lib/math/LogicalCompareToConstantTest.java  |  116 +-
 .../datatorrent/lib/math/MarginKeyValTest.java  |   92 +-
 .../com/datatorrent/lib/math/MarginMapTest.java |   96 +-
 .../com/datatorrent/lib/math/MarginTest.java    |    2 +-
 .../com/datatorrent/lib/math/MaxKeyValTest.java |   28 +-
 .../java/com/datatorrent/lib/math/MaxTest.java  |    6 +-
 .../com/datatorrent/lib/math/MinKeyValTest.java |  188 ++-
 .../java/com/datatorrent/lib/math/MinTest.java  |   10 +-
 .../lib/math/MultiplyByConstantTest.java        |    2 +-
 .../datatorrent/lib/math/QuotientMapTest.java   |  100 +-
 .../com/datatorrent/lib/math/QuotientTest.java  |  117 +-
 .../datatorrent/lib/math/RangeKeyValTest.java   |   30 +-
 .../com/datatorrent/lib/math/RangeTest.java     |  152 +--
 .../lib/math/RunningAverageTest.java            |   58 +-
 .../com/datatorrent/lib/math/SigmaTest.java     |   68 +-
 .../lib/math/SquareCalculusTest.java            |   58 +-
 .../datatorrent/lib/math/SumCountMapTest.java   |  227 ++--
 .../com/datatorrent/lib/math/SumKeyValTest.java |   14 +-
 .../java/com/datatorrent/lib/math/SumTest.java  |  124 +-
 .../XmlKeyValueStringCartesianProductTest.java  |    5 +-
 .../multiwindow/MultiWindowRangeKeyValTest.java |   54 +-
 .../multiwindow/MultiWindowSumKeyValTest.java   |   54 +-
 .../multiwindow/SimpleMovingAverageTest.java    |   20 +-
 .../lib/multiwindow/SlidingWindowTest.java      |  110 +-
 .../lib/multiwindow/SortedMovingWindowTest.java |   55 +-
 .../partitioner/StatelessPartitionerTest.java   |   21 +-
 ...StatelessThroughputBasedPartitionerTest.java |   12 +-
 .../lib/script/JavaScriptOperatorTest.java      |    2 +-
 .../lib/statistics/MeridianOperatorTest.java    |    6 +-
 .../lib/statistics/ModeOperatorTest.java        |    6 +-
 .../statistics/WeightedMeanOperatorTest.java    |    6 +-
 .../lib/stream/ArrayListAggregatorTest.java     |   53 +-
 .../lib/stream/ArrayListToItemTest.java         |   46 +-
 .../lib/stream/ConsolidatorKeyValTest.java      |   36 +-
 .../com/datatorrent/lib/stream/CounterTest.java |   57 +-
 .../lib/stream/DevNullCounterTest.java          |   48 +-
 .../com/datatorrent/lib/stream/DevNullTest.java |   17 +-
 .../lib/stream/HashMapToKeyValPairTest.java     |   54 +-
 .../lib/stream/JsonByteArrayOperatorTest.java   |  139 +--
 .../lib/stream/KeyPairToHashMapTest.java        |   48 +-
 .../lib/stream/RoundRobinHashMapTest.java       |   96 +-
 .../lib/stream/StreamDuplicaterTest.java        |   59 +-
 .../lib/stream/StreamMergerTest.java            |    4 +-
 .../lib/streamquery/DeleteOperatorTest.java     |   79 +-
 .../streamquery/FullOuterJoinOperatorTest.java  |   97 +-
 .../lib/streamquery/GroupByOperatorTest.java    |   83 +-
 .../lib/streamquery/HavingOperatorTest.java     |   83 +-
 .../lib/streamquery/InnerJoinOperatorTest.java  |   97 +-
 .../streamquery/LeftOuterJoinOperatorTest.java  |   95 +-
 .../lib/streamquery/OrderByOperatorTest.java    |  109 +-
 .../streamquery/RightOuterJoinOperatorTest.java |   89 +-
 .../lib/streamquery/SelectOperatorTest.java     |   84 +-
 .../lib/streamquery/SelectTopOperatorTest.java  |    9 +-
 .../lib/streamquery/UpdateOperatorTest.java     |   82 +-
 .../advanced/BetweenConditionTest.java          |   84 +-
 .../advanced/CompoundConditionTest.java         |   90 +-
 .../streamquery/advanced/InConditionTest.java   |   83 +-
 .../streamquery/advanced/LikeConditionTest.java |   81 +-
 .../streamquery/advanced/NegateIndexTest.java   |   72 +-
 .../streamquery/advanced/SelectAverageTest.java |   72 +-
 .../streamquery/advanced/SelectCountTest.java   |   73 +-
 .../advanced/SelectFirstLastTest.java           |   73 +-
 .../streamquery/advanced/SelectMaxMinTest.java  |   73 +-
 .../lib/streamquery/advanced/SumIndexTest.java  |   73 +-
 .../lib/testbench/ActiveMQMessageGenerator.java |  213 ++--
 .../lib/testbench/EventClassifierTest.java      |  348 +++---
 .../lib/testbench/EventGeneratorTest.java       |   36 +-
 .../lib/testbench/EventIncrementerTest.java     |   35 +-
 .../testbench/FilteredEventClassifierTest.java  |   61 +-
 .../lib/testbench/RandomEventGeneratorTest.java |    8 +-
 .../lib/testbench/SeedEventClassifierTest.java  |   19 +-
 .../lib/testbench/SeedEventGeneratorTest.java   |    7 +-
 .../lib/testbench/ThroughputCounterTest.java    |   29 +-
 .../lib/transform/TransformOperatorAppTest.java |   18 +-
 .../DimensionTimeBucketSumOperatorTest.java     |   11 +-
 .../lib/util/JavaScriptFilterOperatorTest.java  |    4 +-
 .../lib/util/KryoCloneUtilsTest.java            |    7 +-
 .../com/datatorrent/lib/util/PojoUtilsTest.java |   65 +-
 .../com/datatorrent/lib/util/TestUtils.java     |    6 +-
 .../CustomTimeBucketRegistryTest.java           |    2 -
 .../dimensions/DimensionsDescriptorTest.java    |   18 +-
 .../state/managed/ManagedStateTestUtils.java    |    3 -
 .../apache/hadoop/io/file/tfile/DTFileTest.java |   65 +-
 .../apache/hadoop/io/file/tfile/TestDTFile.java |  108 +-
 .../io/file/tfile/TestDTFileByteArrays.java     |  255 ++--
 537 files changed, 11424 insertions(+), 12366 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/library-checkstyle-suppressions.xml
----------------------------------------------------------------------
diff --git a/library/library-checkstyle-suppressions.xml b/library/library-checkstyle-suppressions.xml
new file mode 100644
index 0000000..6e4240e
--- /dev/null
+++ b/library/library-checkstyle-suppressions.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+<!--
+
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+-->
+<!DOCTYPE suppressions PUBLIC
+  "-//Puppy Crawl//DTD Suppressions 1.0//EN"
+  "http://www.puppycrawl.com/dtds/suppressions_1_0.dtd">
+
+<suppressions>
+  <suppress checks="RegexpMultiline" files="ConsoleOutputOperator.java"/>
+  <suppress checks="RegexpMultiline" files="CollectionMultiConsoleOutputOperator.java"/>
+  <suppress checks="RegexpMultiline" files="MapMultiConsoleOutputOperator.java"/>
+  <suppress checks="RegexpMultiline" files="ActiveMQMessageGenerator.java"/>
+  <suppress checks="[a-zA-Z0-9]*" files="DTBCFile.java"/>
+  <suppress checks="[a-zA-Z0-9]*" files="DTFile.java"/>
+  <suppress checks="[a-zA-Z0-9]*" files="TestTFile[a-zA-Z0-9]*.java"/>
+</suppressions>

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/pom.xml
----------------------------------------------------------------------
diff --git a/library/pom.xml b/library/pom.xml
index cb59ed3..7c23609 100644
--- a/library/pom.xml
+++ b/library/pom.xml
@@ -34,10 +34,6 @@
 
   <name>Apache Apex Malhar Library</name>
 
-  <properties>
-    <checkstyle.console>false</checkstyle.console>
-  </properties>
-
   <build>
     <plugins>
       <!-- Publish tests jar -->
@@ -187,8 +183,7 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-checkstyle-plugin</artifactId>
         <configuration>
-          <maxAllowedViolations>15995</maxAllowedViolations>
-          <logViolationsToConsole>${checkstyle.console}</logViolationsToConsole>
+          <suppressionsLocation>library-checkstyle-suppressions.xml</suppressionsLocation>
         </configuration>
       </plugin>
     </plugins>

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcher.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcher.java b/library/src/main/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcher.java
index 24d0209..252ea2f 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcher.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/AbstractStreamPatternMatcher.java
@@ -27,10 +27,10 @@ import org.apache.commons.lang3.mutable.MutableInt;
 
 import com.google.common.collect.Lists;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * <p>
@@ -114,8 +114,7 @@ public abstract class AbstractStreamPatternMatcher<T> extends BaseOperator
           tempInt.increment();
           if (!pattern.checkState(t, tempInt.intValue())) {
             itr.remove();
-          }
-          else if (tempInt.equals(patternLength)) {
+          } else if (tempInt.equals(patternLength)) {
             itr.remove();
             processPatternFound();
           }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/BottomNUnifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/BottomNUnifier.java b/library/src/main/java/com/datatorrent/lib/algo/BottomNUnifier.java
index 1761ed3..8b76852 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/BottomNUnifier.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/BottomNUnifier.java
@@ -24,8 +24,8 @@ import java.util.Map;
 
 import javax.validation.constraints.Min;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 import com.datatorrent.lib.util.TopNSort;
 
@@ -100,11 +100,11 @@ public class BottomNUnifier<K, V> implements Unifier<HashMap<K, ArrayList<V>>>
       if (pqueue == null) {
         pqueue = new TopNSort<V>(5, values.size(), false);
         kmap.put(e.getKey(), pqueue);
-        for (int i = (values.size()-1); i >= 0; i--) {
+        for (int i = (values.size() - 1); i >= 0; i--) {
           pqueue.offer(values.get(i));
         }
       } else {
-        for (int i = (values.size()-1); i >= 0; i--) {
+        for (int i = (values.size() - 1); i >= 0; i--) {
           pqueue.offer(values.get(i));
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/FilterKeyVals.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/FilterKeyVals.java b/library/src/main/java/com/datatorrent/lib/algo/FilterKeyVals.java
index f3996a3..41259a7 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/FilterKeyVals.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/FilterKeyVals.java
@@ -103,7 +103,8 @@ public class FilterKeyVals<K,V> extends BaseKeyOperator<K>
    * Gets the inverse property.
    * @return inverse
    */
-  public boolean getInverse() {
+  public boolean getInverse()
+  {
     return inverse;
   }
 
@@ -111,7 +112,8 @@ public class FilterKeyVals<K,V> extends BaseKeyOperator<K>
    * If true then only matches are emitted. If false then only non matches are emitted.
    * @param val
    */
-  public void setInverse(boolean val) {
+  public void setInverse(boolean val)
+  {
     inverse = val;
   }
 
@@ -120,7 +122,8 @@ public class FilterKeyVals<K,V> extends BaseKeyOperator<K>
    * @return keyvals hash
    */
   @NotNull()
-  public HashMap<HashMap<K,V>,Object> getKeyVals() {
+  public HashMap<HashMap<K,V>,Object> getKeyVals()
+  {
     return keyvals;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/FilterKeysHashMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/FilterKeysHashMap.java b/library/src/main/java/com/datatorrent/lib/algo/FilterKeysHashMap.java
index fab1d3d..0f9a738 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/FilterKeysHashMap.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/FilterKeysHashMap.java
@@ -99,10 +99,10 @@ public class FilterKeysHashMap<K, V> extends BaseKeyOperator<K>
             dtuple2.put(cloneKey(e2.getKey()), cloneValue(e2.getValue()));
           }
         }
-        if (dtuple == null && dtuple2 != null){
+        if (dtuple == null && dtuple2 != null) {
           dtuple = new HashMap<K, HashMap<K, V>>();
         }
-        if (dtuple != null && dtuple2 != null){
+        if (dtuple != null && dtuple2 != null) {
           dtuple.put(cloneKey(e.getKey()), dtuple2);
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/FilterKeysMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/FilterKeysMap.java b/library/src/main/java/com/datatorrent/lib/algo/FilterKeysMap.java
index 3e0f007..136a5d4 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/FilterKeysMap.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/FilterKeysMap.java
@@ -119,7 +119,8 @@ public class FilterKeysMap<K,V> extends BaseKeyOperator<K>
    * If true then only matches are emitted. If false then only non matches are emitted.
    * @return inverse
    */
-  public boolean getInverse() {
+  public boolean getInverse()
+  {
     return inverse;
   }
 
@@ -128,7 +129,8 @@ public class FilterKeysMap<K,V> extends BaseKeyOperator<K>
    * Sets the inverse property. If true then only matches are emitted. If false then only non matches are emitted.
    * @param val
    */
-  public void setInverse(boolean val) {
+  public void setInverse(boolean val)
+  {
     inverse = val;
   }
 
@@ -136,8 +138,9 @@ public class FilterKeysMap<K,V> extends BaseKeyOperator<K>
    * Adds a key to the filter list
    * @param str
    */
-  public void setKey(K str) {
-      keys.put(str, null);
+  public void setKey(K str)
+  {
+    keys.put(str, null);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/FilterValues.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/FilterValues.java b/library/src/main/java/com/datatorrent/lib/algo/FilterValues.java
index 0887d49..3b78ac4 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/FilterValues.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/FilterValues.java
@@ -22,11 +22,11 @@ import java.util.HashMap;
 
 import javax.validation.constraints.NotNull;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator filters the incoming stream of values by the specified set of filter values.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/FirstN.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/FirstN.java b/library/src/main/java/com/datatorrent/lib/algo/FirstN.java
index 19d7eca..d9db3cf 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/FirstN.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/FirstN.java
@@ -102,10 +102,11 @@ public class FirstN<K,V> extends AbstractBaseNOperatorMap<K, V>
 
   /**
    * First N number of KeyValue pairs for each Key.
+   *
    * @param val
    */
   public void setN(int val)
   {
-   super.setN(val);
+    super.setN(val);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/InsertSort.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/InsertSort.java b/library/src/main/java/com/datatorrent/lib/algo/InsertSort.java
index 2fd358a..d4414a4 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/InsertSort.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/InsertSort.java
@@ -56,8 +56,7 @@ import com.datatorrent.lib.util.AbstractBaseSortOperator;
 //
 // TODO: Override PriorityQueue and rewrite addAll to insert with location
 //
-public class InsertSort<K> extends AbstractBaseSortOperator<K> implements
-  Unifier<ArrayList<K>>
+public class InsertSort<K> extends AbstractBaseSortOperator<K> implements Unifier<ArrayList<K>>
 {
   /**
    * The input port on which individual tuples are received for sorting.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/InsertSortDesc.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/InsertSortDesc.java b/library/src/main/java/com/datatorrent/lib/algo/InsertSortDesc.java
index e2d52c2..4498cfb 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/InsertSortDesc.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/InsertSortDesc.java
@@ -18,19 +18,18 @@
  */
 package com.datatorrent.lib.algo;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.PriorityQueue;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
-
 import com.datatorrent.lib.util.AbstractBaseSortOperator;
 import com.datatorrent.lib.util.ReversibleComparator;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.PriorityQueue;
-
 /**
  * This operator takes the values it receives each window and outputs them in ascending order at the end of each window.
  * <p>

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyArrayUnifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyArrayUnifier.java b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyArrayUnifier.java
index 0039b43..b14e46f 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyArrayUnifier.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyArrayUnifier.java
@@ -22,8 +22,8 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyMap.java b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyMap.java
index 571fa11..2996b5a 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyMap.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyMap.java
@@ -90,7 +90,7 @@ public class LeastFrequentKeyMap<K, V> extends AbstractBaseFrequentKey<K>
     public Unifier<HashMap<K, Integer>> getUnifier()
     {
       Unifier<HashMap<K, Integer>> ret = new UnifierHashMapFrequent<K>();
-      ((UnifierHashMapFrequent<K>) ret).setLeast(true);
+      ((UnifierHashMapFrequent<K>)ret).setLeast(true);
       return ret;
     }
   };
@@ -102,12 +102,12 @@ public class LeastFrequentKeyMap<K, V> extends AbstractBaseFrequentKey<K>
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<ArrayList<HashMap<K, Integer>>> list = new DefaultOutputPort<ArrayList<HashMap<K, Integer>>>()
-      {
+  {
     @Override
     public Unifier<ArrayList<HashMap<K, Integer>>> getUnifier()
     {
       Unifier<ArrayList<HashMap<K, Integer>>> ret = new UnifierArrayHashMapFrequent<K>();
-      ((UnifierArrayHashMapFrequent<K>) ret).setLeast(true);
+      ((UnifierArrayHashMapFrequent<K>)ret).setLeast(true);
       return ret;
     }
   };

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyUnifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyUnifier.java b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyUnifier.java
index b2e6180..de899b6 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyUnifier.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentKeyUnifier.java
@@ -21,8 +21,8 @@ package com.datatorrent.lib.algo;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.Unifier;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentValue.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentValue.java b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentValue.java
index e45df96..ac91e96 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentValue.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/LeastFrequentValue.java
@@ -79,7 +79,7 @@ public class LeastFrequentValue<K> extends AbstractBaseFrequentKey<K>
    * which occurred the least number of times,
    * is emitted.
    */
-  @OutputPortFieldAnnotation(optional=true)
+  @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<HashMap<K, Integer>> least = new DefaultOutputPort<HashMap<K, Integer>>()
   {
     @SuppressWarnings({ "rawtypes", "unchecked" })
@@ -95,7 +95,7 @@ public class LeastFrequentValue<K> extends AbstractBaseFrequentKey<K>
    * which occurred the least number of times,
    * is emitted.
    */
-  @OutputPortFieldAnnotation(optional=true)
+  @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<ArrayList<HashMap<K, Integer>>> list = new DefaultOutputPort<ArrayList<HashMap<K, Integer>>>()
   {
     @SuppressWarnings({ "rawtypes", "unchecked" })

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/MatchMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/MatchMap.java b/library/src/main/java/com/datatorrent/lib/algo/MatchMap.java
index 7fba6e6..e84c88b 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/MatchMap.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/MatchMap.java
@@ -88,8 +88,7 @@ public class MatchMap<K,V extends Number> extends BaseMatchOperator<K, V>
       }
       if (compareValue(v.doubleValue())) {
         tupleMatched(tuple);
-      }
-      else {
+      } else {
         tupleNotMatched(tuple);
       }
     }
@@ -98,7 +97,7 @@ public class MatchMap<K,V extends Number> extends BaseMatchOperator<K, V>
   /**
    * The output port which emits filtered key value pairs.
    */
-  @OutputPortFieldAnnotation(optional=true)
+  @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<HashMap<K, V>> match = new DefaultOutputPort<HashMap<K, V>>()
   {
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/MergeSort.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/MergeSort.java b/library/src/main/java/com/datatorrent/lib/algo/MergeSort.java
index 6db4783..9ccf76c 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/MergeSort.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/MergeSort.java
@@ -61,126 +61,134 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 @OperatorAnnotation(partitionable = true)
 public abstract class MergeSort<K>  implements Operator, Unifier<ArrayList<K>>
 {
-	/**
-	 * Sorted merged list.
-	 */
-	private ArrayList<K> mergedList = null;
-
-	/**
-	 * The input port which receives lists to be merged and sorted.
-	 */
+  /**
+   * Sorted merged list.
+   */
+  private ArrayList<K> mergedList = null;
+
+  /**
+   * The input port which receives lists to be merged and sorted.
+   */
   public final transient DefaultInputPort<ArrayList<K>> data = new DefaultInputPort<ArrayList<K>>()
   {
-  	/**
-  	 * Merge incoming tuple.
-  	 */
+    /**
+     * Merge incoming tuple.
+     */
     @Override
     public void process(ArrayList<K> tuple)
     {
-    	mergedList = processMergeList(mergedList, tuple);
+      mergedList = processMergeList(mergedList, tuple);
     }
   };
 
   /**
    * The output port which emits merged and sorted lists.
    */
-  public final transient DefaultOutputPort<ArrayList<K>> sort = new DefaultOutputPort<ArrayList<K>>() {
-  		@Override
-  		public Unifier<ArrayList<K>> getUnifier()
-  		{
-  			return getUnifierInstance();
-  		}
+  public final transient DefaultOutputPort<ArrayList<K>> sort = new DefaultOutputPort<ArrayList<K>>()
+  {
+    @Override
+    public Unifier<ArrayList<K>> getUnifier()
+    {
+      return getUnifierInstance();
+    }
   };
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-		// TODO Auto-generated method stub
-
-	}
-
-	@Override
-	public void teardown()
-	{
-		// TODO Auto-generated method stub
-
-	}
-	@Override
-	public void beginWindow(long windowId)
-	{
-		mergedList = null;
-	}
-	@Override
-	public void endWindow()
-	{
-		sort.emit(mergedList);
-		mergedList = null;
-	}
-
-	/**
-	 *  Sorted parameter list are merged into sorted merged output list.
-	 *
-	 * @param list1 sorted list aggregated by operator
-	 * @param list2 Input port sorted list to be merged.
-	 * @return sorted merged output list.
-	 */
-	protected ArrayList<K> processMergeList(ArrayList<K> list1,
-			ArrayList<K> list2)
-	{
-		// null lists
-		if (list1 == null) return list2;
-		if (list2 == null) return list1;
-
-		// Create output list
-		ArrayList<K> result = new ArrayList<K>();
-		int index1 = 0;
-		int index2 = 0;
-	  while (true) {
-
-	  	// list1 is exhausted
-	  	if (index1 == list1.size()) {
-	  		while(index2 < list2.size()) {
-	  			result.add(list2.get(index2++));
-	  		}
-	  		break;
-	  	}
-
-	  	// list2 is exhausted
-	  	if (index2 == list2.size()) {
-	  		while(index1 < list1.size()) {
-	  			result.add(list1.get(index1++));
-	  		}
-	  		break;
-	  	}
-
-	  	// compare values
-	  	K val1 = list1.get(index1++);
-	  	K val2 = list2.get(index2++);
-	  	K[] vals = compare(val1, val2);
-	  	result.add(vals[0]);
-	  	if (vals[1] != null) result.add(vals[1]);
-	  }
-
-	  // done
+  @Override
+  public void setup(OperatorContext context)
+  {
+    // TODO Auto-generated method stub
+
+  }
+
+  @Override
+  public void teardown()
+  {
+    // TODO Auto-generated method stub
+  }
+
+  @Override
+  public void beginWindow(long windowId)
+  {
+    mergedList = null;
+  }
+
+  @Override
+  public void endWindow()
+  {
+    sort.emit(mergedList);
+    mergedList = null;
+  }
+
+  /**
+   *  Sorted parameter lists are merged into a sorted merged output list.
+   *
+   * @param list1 sorted list aggregated by operator
+   * @param list2 Input port sorted list to be merged.
+   * @return sorted merged output list.
+   */
+  protected ArrayList<K> processMergeList(ArrayList<K> list1,
+      ArrayList<K> list2)
+  {
+    // null lists
+    if (list1 == null) {
+      return list2;
+    }
+    if (list2 == null) {
+      return list1;
+    }
+
+    // Create output list
+    ArrayList<K> result = new ArrayList<K>();
+    int index1 = 0;
+    int index2 = 0;
+    while (true) {
+
+      // list1 is exhausted
+      if (index1 == list1.size()) {
+        while (index2 < list2.size()) {
+          result.add(list2.get(index2++));
+        }
+        break;
+      }
+
+      // list2 is exhausted
+      if (index2 == list2.size()) {
+        while (index1 < list1.size()) {
+          result.add(list1.get(index1++));
+        }
+        break;
+      }
+
+      // compare values
+      K val1 = list1.get(index1++);
+      K val2 = list2.get(index2++);
+      K[] vals = compare(val1, val2);
+      result.add(vals[0]);
+      if (vals[1] != null) {
+        result.add(vals[1]);
+      }
+    }
+
+    // done
     return result;
-	}
-
-	/**
-	 * Unifier process function implementation.
-	 */
-	@Override
-	public void process(ArrayList<K> tuple)
-	{
-		mergedList = processMergeList(mergedList, tuple);
-	}
-
-	/**
-	 * abstract sort function to be implemented by sub class.
-	 */
-	abstract public  K[] compare(K val1, K val2);
-
-	/**
-	 *  Get output port unifier instance, sub class should return new instance of itself.
-	 */
-	abstract public Unifier<ArrayList<K>> getUnifierInstance();
+  }
+
+  /**
+   * Unifier process function implementation.
+   */
+  @Override
+  public void process(ArrayList<K> tuple)
+  {
+    mergedList = processMergeList(mergedList, tuple);
+  }
+
+  /**
+   * abstract sort function to be implemented by sub class.
+   */
+  public abstract K[] compare(K val1, K val2);
+
+  /**
+   *  Get output port unifier instance, sub class should return new instance of itself.
+   */
+  public abstract Unifier<ArrayList<K>> getUnifierInstance();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/MergeSortNumber.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/MergeSortNumber.java b/library/src/main/java/com/datatorrent/lib/algo/MergeSortNumber.java
index 3e73619..e9d0eff 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/MergeSortNumber.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/MergeSortNumber.java
@@ -18,11 +18,13 @@
  */
 package com.datatorrent.lib.algo;
 
-import com.datatorrent.api.annotation.OperatorAnnotation;
 import java.util.ArrayList;
 
+import com.datatorrent.api.annotation.OperatorAnnotation;
+
 /**
- * This unifier takes sorted lists of tuples each window and merges them into one large sorted list at the end of each window.
+ * This unifier takes sorted lists of tuples each window and merges them into one large sorted list at the end of
+ * each window.
  * <p>
  * Incoming sorted list is merged into already existing sorted list. The input list is expected to be sorted. <b>
  * At the end of the window, merged sorted list is emitted on sort output port. <br>
@@ -51,51 +53,53 @@ import java.util.ArrayList;
 @OperatorAnnotation(partitionable = true)
 public class MergeSortNumber<V extends Number> extends MergeSort<V>
 {
-	/**
-	 * Ascending/Desending flag;
-	 */
-	private boolean ascending = true;
+  /**
+   * Ascending/Descending flag;
+   */
+  private boolean ascending = true;
 
-	/**
-	 * sort function.
-	 */
-	@SuppressWarnings("unchecked")
-	public  V[] compare(V val1, V val2) {
-		V[] result =  (V[]) new Number[2];
-		if (ascending) {
-  		if (val1.doubleValue() < val2.doubleValue()) {
-  			result[0] = val1;
-  			result[1] = val2;
-  		} else {
-  			result[0] = val2;
-  			result[1] = val1;
-  		}
-		} else {
-  		if (val1.doubleValue() < val2.doubleValue()) {
-  			result[0] = val2;
-  			result[1] = val1;
-  		} else {
-  			result[0] = val1;
-  			result[1] = val2;
-  		}
-		}
-		return result;
-	}
+  /**
+   * sort function.
+   */
+  @SuppressWarnings("unchecked")
+  public V[] compare(V val1, V val2)
+  {
+    V[] result = (V[])new Number[2];
+    if (ascending) {
+      if (val1.doubleValue() < val2.doubleValue()) {
+        result[0] = val1;
+        result[1] = val2;
+      } else {
+        result[0] = val2;
+        result[1] = val1;
+      }
+    } else {
+      if (val1.doubleValue() < val2.doubleValue()) {
+        result[0] = val2;
+        result[1] = val1;
+      } else {
+        result[0] = val1;
+        result[1] = val2;
+      }
+    }
+    return result;
+  }
 
-	/**
-	 *  Merge class itself is unifier.
-	 */
-	public Unifier<ArrayList<V>> getUnifierInstance() {
-		return new MergeSortNumber<V>();
-	}
+  /**
+   *  Merge class itself is unifier.
+   */
+  public Unifier<ArrayList<V>> getUnifierInstance()
+  {
+    return new MergeSortNumber<V>();
+  }
 
-	public boolean isAscending()
-	{
-		return ascending;
-	}
+  public boolean isAscending()
+  {
+    return ascending;
+  }
 
-	public void setAscending(boolean ascending)
-	{
-		this.ascending = ascending;
-	}
+  public void setAscending(boolean ascending)
+  {
+    this.ascending = ascending;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/MostFrequentKeyMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/MostFrequentKeyMap.java b/library/src/main/java/com/datatorrent/lib/algo/MostFrequentKeyMap.java
index 442546d..c300c24 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/MostFrequentKeyMap.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/MostFrequentKeyMap.java
@@ -88,7 +88,7 @@ public class MostFrequentKeyMap<K,V> extends AbstractBaseFrequentKey<K>
     public Unifier<HashMap<K, Integer>> getUnifier()
     {
       Unifier<HashMap<K, Integer>> ret = new UnifierHashMapFrequent<K>();
-      ((UnifierHashMapFrequent<K>) ret).setLeast(false);
+      ((UnifierHashMapFrequent<K>)ret).setLeast(false);
       return ret;
     }
   };
@@ -101,7 +101,7 @@ public class MostFrequentKeyMap<K,V> extends AbstractBaseFrequentKey<K>
     public Unifier<ArrayList<HashMap<K, Integer>>> getUnifier()
     {
       Unifier<ArrayList<HashMap<K, Integer>>> ret = new UnifierArrayHashMapFrequent<K>();
-      ((UnifierHashMapFrequent) ret).setLeast(false);
+      ((UnifierHashMapFrequent)ret).setLeast(false);
       return ret;
     }
   };

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/MostFrequentValue.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/MostFrequentValue.java b/library/src/main/java/com/datatorrent/lib/algo/MostFrequentValue.java
index 7388ad0..ca4aedf 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/MostFrequentValue.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/MostFrequentValue.java
@@ -104,7 +104,7 @@ public class MostFrequentValue<K> extends AbstractBaseFrequentKey<K>
     public Unifier<ArrayList<HashMap<K, Integer>>> getUnifier()
     {
       Unifier<ArrayList<HashMap<K, Integer>>> ret = new UnifierArrayHashMapFrequent<K>();
-      ((UnifierHashMapFrequent) ret).setLeast(false);
+      ((UnifierHashMapFrequent)ret).setLeast(false);
       return ret;
     }
   };

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/algo/UniqueCounter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/algo/UniqueCounter.java b/library/src/main/java/com/datatorrent/lib/algo/UniqueCounter.java
index 66bfa60..1b6d944 100644
--- a/library/src/main/java/com/datatorrent/lib/algo/UniqueCounter.java
+++ b/library/src/main/java/com/datatorrent/lib/algo/UniqueCounter.java
@@ -105,8 +105,7 @@ public class UniqueCounter<K> extends BaseUniqueKeyCounter<K>
     if (tuple != null) {
       count.emit(tuple);
     }
-    if(!cumulative)
-    {
+    if (!cumulative) {
       map.clear();
     }
   }
@@ -115,7 +114,8 @@ public class UniqueCounter<K> extends BaseUniqueKeyCounter<K>
    * Gets the cumulative mode.
    * @return The cumulative mode.
    */
-  public boolean isCumulative() {
+  public boolean isCumulative()
+  {
     return cumulative;
   }
 
@@ -126,7 +126,8 @@ public class UniqueCounter<K> extends BaseUniqueKeyCounter<K>
    * could eventually run out of memory.
    * @param cumulative
    */
-  public void setCumulative(boolean cumulative) {
+  public void setCumulative(boolean cumulative)
+  {
     this.cumulative = cumulative;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/QueueUtils.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/QueueUtils.java b/library/src/main/java/com/datatorrent/lib/appdata/QueueUtils.java
index 4213aae..f8880bb 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/QueueUtils.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/QueueUtils.java
@@ -43,14 +43,14 @@ public class QueueUtils
 
     public void lock()
     {
-      synchronized(lock) {
+      synchronized (lock) {
         locked = true;
       }
     }
 
     public void unlock()
     {
-      synchronized(lock) {
+      synchronized (lock) {
         locked = false;
         lock.notifyAll();
       }
@@ -58,12 +58,11 @@ public class QueueUtils
 
     public void gate()
     {
-      synchronized(lock) {
-        while(locked) {
+      synchronized (lock) {
+        while (locked) {
           try {
             lock.wait();
-          }
-          catch(InterruptedException ex) {
+          } catch (InterruptedException ex) {
             //Do nothing
           }
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/StoreUtils.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/StoreUtils.java b/library/src/main/java/com/datatorrent/lib/appdata/StoreUtils.java
index e6712d1..31c3588 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/StoreUtils.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/StoreUtils.java
@@ -18,16 +18,15 @@
  */
 package com.datatorrent.lib.appdata;
 
-import com.google.common.base.Preconditions;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.io.SimpleSinglePortInputOperator.BufferingOutputPort;
+import com.google.common.base.Preconditions;
 
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Sink;
+import com.datatorrent.lib.io.SimpleSinglePortInputOperator.BufferingOutputPort;
 
 /**
  * @since 3.3.0
@@ -44,25 +43,23 @@ public class StoreUtils
    */
   public static <T> void attachOutputPortToInputPort(DefaultOutputPort<T> outputPort, final DefaultInputPort<T> inputPort)
   {
-    outputPort.setSink(
-      new Sink<Object>()
+    outputPort.setSink(new Sink<Object>()
+    {
+      @Override
+      @SuppressWarnings("unchecked")
+      public void put(Object tuple)
       {
-        @Override
-        @SuppressWarnings("unchecked")
-        public void put(Object tuple)
-        {
-          LOG.debug("processing tuple");
-          inputPort.process((T)tuple);
-        }
-
-        @Override
-        public int getCount(boolean reset)
-        {
-          return 0;
-        }
+        LOG.debug("processing tuple");
+        inputPort.process((T)tuple);
+      }
 
+      @Override
+      public int getCount(boolean reset)
+      {
+        return 0;
       }
-    );
+
+    });
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlush.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlush.java b/library/src/main/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlush.java
index 44697d5..bed700b 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlush.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/datastructs/CacheLRUSynchronousFlush.java
@@ -18,9 +18,6 @@
  */
 package com.datatorrent.lib.appdata.datastructs;
 
-import it.unimi.dsi.fastutil.longs.Long2ObjectAVLTreeMap;
-import it.unimi.dsi.fastutil.longs.Long2ObjectSortedMap;
-
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
@@ -30,6 +27,9 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
+import it.unimi.dsi.fastutil.longs.Long2ObjectAVLTreeMap;
+import it.unimi.dsi.fastutil.longs.Long2ObjectSortedMap;
+
 /**
  * This is an LRU cache.
  * @param <KEY> The type of keys in the cache.
@@ -53,8 +53,7 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
     setFlushListener(flushListener);
   }
 
-  public CacheLRUSynchronousFlush(int flushedSize,
-                                  CacheFlushListener<KEY, VALUE> flushListener)
+  public CacheLRUSynchronousFlush(int flushedSize, CacheFlushListener<KEY, VALUE> flushListener)
   {
     setFlushedSizePri(flushedSize);
     setFlushListener(flushListener);
@@ -103,10 +102,10 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
 
     Long oldTimeStamp = keyToTimeStamp.put(key, timeStamp);
 
-    if(oldTimeStamp == null || oldTimeStamp != timeStamp) {
+    if (oldTimeStamp == null || oldTimeStamp != timeStamp) {
       Set<KEY> keys = timeStampToKey.get(timeStamp);
 
-      if(keys == null) {
+      if (keys == null) {
         keys = Sets.newHashSet();
         timeStampToKey.put(timeStamp, keys);
       }
@@ -114,7 +113,7 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
       keys.add(key);
     }
 
-    if(oldTimeStamp != null) {
+    if (oldTimeStamp != null) {
       timeStampToKey.get(oldTimeStamp).remove(key);
     }
 
@@ -132,7 +131,7 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
     Preconditions.checkNotNull(key);
 
     Long timeStamp = keyToTimeStamp.get(key);
-    if(timeStamp != null) {
+    if (timeStamp != null) {
       keyToTimeStamp.remove(key);
       Set<KEY> keys = timeStampToKey.get(timeStamp);
       keys.remove(key);
@@ -146,17 +145,17 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
   {
     int currentSize = keyToValue.size();
 
-    while(currentSize > flushedSize) {
+    while (currentSize > flushedSize) {
       long firstKey = timeStampToKey.firstLongKey();
       Set<KEY> keys = timeStampToKey.get(firstKey);
 
       Iterator<KEY> keyIterator = keys.iterator();
 
-      while(keyIterator.hasNext() && currentSize > flushedSize) {
+      while (keyIterator.hasNext() && currentSize > flushedSize) {
         KEY key = keyIterator.next();
         VALUE value = keyToValue.remove(key);
 
-        if(value == null) {
+        if (value == null) {
           continue;
         }
 
@@ -166,7 +165,7 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
         flushListener.flush(key, value);
       }
 
-      if(keys.isEmpty()) {
+      if (keys.isEmpty()) {
         timeStampToKey.remove(firstKey);
       }
     }
@@ -174,7 +173,7 @@ public class CacheLRUSynchronousFlush<KEY, VALUE>
 
   public void flushChanges()
   {
-    for(KEY key: changed) {
+    for (KEY key : changed) {
       VALUE value = keyToValue.get(key);
       flushListener.flush(key, value);
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/datastructs/DimensionalTable.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/datastructs/DimensionalTable.java b/library/src/main/java/com/datatorrent/lib/appdata/datastructs/DimensionalTable.java
index 3cb105b..be228fa 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/datastructs/DimensionalTable.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/datastructs/DimensionalTable.java
@@ -23,14 +23,14 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * This is a {@link DimensionalTable}. A {@link DimensionalTable} is similar to a Map but is a hybrid
  * between a conventional table and a map. Data in a {@link DimensionalTable} is organized into rows
@@ -134,9 +134,7 @@ public class DimensionalTable<DATA>
    */
   private void initialize()
   {
-    for(int columnIndex = 0;
-        columnIndex < dimensionNameToIndex.size();
-        columnIndex++) {
+    for (int columnIndex = 0; columnIndex < dimensionNameToIndex.size(); columnIndex++) {
       dimensionColumns.add(Lists.newArrayList());
     }
   }
@@ -150,19 +148,16 @@ public class DimensionalTable<DATA>
     Preconditions.checkNotNull(headerNames);
     Preconditions.checkArgument(!headerNames.isEmpty(), "headerNames");
 
-    for(String headerName: headerNames) {
+    for (String headerName : headerNames) {
       Preconditions.checkNotNull(headerName);
     }
 
     Set<String> headerNameSet = Sets.newHashSet(headerNames);
 
     Preconditions.checkArgument(headerNameSet.size() == headerNames.size(),
-                                "The provided list of header names has duplicate names: " +
-                                headerNames);
+        "The provided list of header names has duplicate names: " + headerNames);
 
-    for(int index = 0;
-        index < headerNames.size();
-        index++) {
+    for (int index = 0; index < headerNames.size(); index++) {
       dimensionNameToIndex.put(headerNames.get(index), index);
     }
   }
@@ -179,25 +174,23 @@ public class DimensionalTable<DATA>
     Preconditions.checkNotNull(data);
     Preconditions.checkNotNull(keys);
     Preconditions.checkArgument(keys.length == dimensionNameToIndex.size(),
-                                "All the dimension keys should be specified.");
+        "All the dimension keys should be specified.");
 
     List<Object> keysList = Lists.newArrayList();
 
-    for(Object key: keys) {
+    for (Object key : keys) {
       keysList.add(key);
     }
 
     DATA prev = dimensionKeysToData.put(keysList, data);
 
-    if(prev != null) {
+    if (prev != null) {
       return;
     }
 
     dataColumn.add(data);
 
-    for(int index = 0;
-        index < keys.length;
-        index++) {
+    for (int index = 0; index < keys.length; index++) {
       Object key = keys[index];
       dimensionColumns.get(index).add(key);
     }
@@ -216,7 +209,7 @@ public class DimensionalTable<DATA>
 
     Object[] keysArray = new Object[keys.size()];
 
-    for(Map.Entry<String, ?> entry: keys.entrySet()) {
+    for (Map.Entry<String, ?> entry : keys.entrySet()) {
       String keyName = entry.getKey();
       Object value = entry.getValue();
 
@@ -240,8 +233,7 @@ public class DimensionalTable<DATA>
   public DATA getDataPoint(List<?> keys)
   {
     Preconditions.checkNotNull(keys);
-    Preconditions.checkArgument(keys.size() == dimensionNameToIndex.size(),
-                                "All the keys must be specified.");
+    Preconditions.checkArgument(keys.size() == dimensionNameToIndex.size(), "All the keys must be specified.");
 
     return dimensionKeysToData.get(keys);
   }
@@ -256,18 +248,15 @@ public class DimensionalTable<DATA>
   public DATA getDataPoint(Map<String, ?> keys)
   {
     Preconditions.checkNotNull(keys);
-    Preconditions.checkArgument(keys.size() == dimensionNameToIndex.size(),
-                                "All the keys must be specified.");
+    Preconditions.checkArgument(keys.size() == dimensionNameToIndex.size(), "All the keys must be specified.");
 
     List<Object> keysList = Lists.newArrayList();
 
-    for(int index = 0;
-        index < dimensionNameToIndex.size();
-        index++) {
+    for (int index = 0; index < dimensionNameToIndex.size(); index++) {
       keysList.add(null);
     }
 
-    for(Map.Entry<String, ?> entry: keys.entrySet()) {
+    for (Map.Entry<String, ?> entry : keys.entrySet()) {
       String key = entry.getKey();
       Object value = entry.getValue();
       Integer index = dimensionNameToIndex.get(key);
@@ -289,14 +278,14 @@ public class DimensionalTable<DATA>
     Preconditions.checkNotNull(keys);
 
     Preconditions.checkArgument(dimensionNameToIndex.keySet().containsAll(keys.keySet()),
-                                "The given keys contain names which are not valid keys.");
+        "The given keys contain names which are not valid keys.");
 
     List<Integer> indices = Lists.newArrayList();
     List<List<Object>> keyColumns = Lists.newArrayList();
 
     Map<Integer, Object> indexToKey = Maps.newHashMap();
 
-    for(Map.Entry<String, ?> entry: keys.entrySet()) {
+    for (Map.Entry<String, ?> entry : keys.entrySet()) {
       String dimensionName = entry.getKey();
       Object value = entry.getValue();
       Integer index = dimensionNameToIndex.get(dimensionName);
@@ -308,7 +297,7 @@ public class DimensionalTable<DATA>
     Collections.sort(indices);
     List<Object> tempKeys = Lists.newArrayList();
 
-    for(Integer index: indices) {
+    for (Integer index : indices) {
       tempKeys.add(indexToKey.get(index));
       keyColumns.add(dimensionColumns.get(index));
     }
@@ -316,27 +305,22 @@ public class DimensionalTable<DATA>
     int numRows = keyColumns.get(0).size();
     List<DATA> results = Lists.newArrayList();
 
-    for(int rowIndex = 0;
-        rowIndex < numRows;
-        rowIndex++)
-    {
+    for (int rowIndex = 0; rowIndex < numRows; rowIndex++) {
       boolean allEqual = true;
 
-      for(int columnIndex = 0;
-          columnIndex < tempKeys.size();
-          columnIndex++) {
+      for (int columnIndex = 0; columnIndex < tempKeys.size(); columnIndex++) {
         Object key = tempKeys.get(columnIndex);
         Object keyColumn = keyColumns.get(columnIndex).get(rowIndex);
 
-        if((key == null && keyColumn != null) ||
-           (key != null && keyColumn == null) ||
-           (key != null && keyColumn != null && !keyColumn.equals(key))) {
+        if ((key == null && keyColumn != null) ||
+            (key != null && keyColumn == null) ||
+            (key != null && !keyColumn.equals(key))) {
           allEqual = false;
           break;
         }
       }
 
-      if(allEqual) {
+      if (allEqual) {
         results.add(dataColumn.get(rowIndex));
       }
     }
@@ -357,7 +341,8 @@ public class DimensionalTable<DATA>
    * Returns the number of rows in the table.
    * @return The number of rows in the table.
    */
-  public int size() {
+  public int size()
+  {
     return dataColumn.size();
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOByteArrayList.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOByteArrayList.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOByteArrayList.java
index 44f2f7d..0819941 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOByteArrayList.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOByteArrayList.java
@@ -18,14 +18,14 @@
  */
 package com.datatorrent.lib.appdata.gpo;
 
+import java.util.Collection;
+import java.util.Iterator;
+
 import it.unimi.dsi.fastutil.bytes.ByteArrayList;
 import it.unimi.dsi.fastutil.bytes.ByteCollection;
 import it.unimi.dsi.fastutil.bytes.ByteIterator;
 import it.unimi.dsi.fastutil.bytes.ByteList;
 
-import java.util.Collection;
-import java.util.Iterator;
-
 /**
  * This is a helper class which stores primitive bytes in an array list. This is useful
  * for serialization and deserialization.
@@ -86,9 +86,7 @@ public class GPOByteArrayList extends ByteArrayList
 
   public boolean add(byte[] bytes)
   {
-    for(int byteCounter = 0;
-        byteCounter < bytes.length;
-        byteCounter++) {
+    for (int byteCounter = 0; byteCounter < bytes.length; byteCounter++) {
       this.add(bytes[byteCounter]);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOGetters.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOGetters.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOGetters.java
index c6827a0..9f26f22 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOGetters.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOGetters.java
@@ -33,7 +33,9 @@ import com.datatorrent.lib.util.PojoUtils.GetterShort;
  * which need to take POJOs as input and convert them into GPOMutable objects.
  * @since 3.0.0
  */
-public class GPOGetters {
+public class GPOGetters
+{
+
   /**
    * Array of boolean getters.
    */


[13/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/QuotientMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/QuotientMap.java b/library/src/main/java/com/datatorrent/lib/math/QuotientMap.java
index c114bad..a10fe95 100644
--- a/library/src/main/java/com/datatorrent/lib/math/QuotientMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/QuotientMap.java
@@ -23,11 +23,11 @@ import java.util.Map;
 
 import javax.validation.constraints.Min;
 
-import com.datatorrent.api.annotation.OperatorAnnotation;
 import org.apache.commons.lang.mutable.MutableDouble;
 
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
 
 /**
@@ -63,175 +63,175 @@ import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
  */
 @OperatorAnnotation(partitionable = false)
 public class QuotientMap<K, V extends Number> extends
-		BaseNumberKeyValueOperator<K, V>
+    BaseNumberKeyValueOperator<K, V>
 {
-	/**
-	 * Numerator key/sum value map.
-	 */
-	protected HashMap<K, MutableDouble> numerators = new HashMap<K, MutableDouble>();
-
-	/**
-	 * Denominator key/sum value map.
-	 */
-	protected HashMap<K, MutableDouble> denominators = new HashMap<K, MutableDouble>();
-
-	/**
-	 * Count occurrence of keys if set to true.
-	 */
-	boolean countkey = false;
-
-	/**
-	 * Quotient multiply by value.
-	 */
-	int mult_by = 1;
-
-	/**
-	 * Numerator input port.
-	 */
-	public final transient DefaultInputPort<Map<K, V>> numerator = new DefaultInputPort<Map<K, V>>()
-	{
-		/**
-		 * Added tuple to the numerator hash
-		 */
-		@Override
-		public void process(Map<K, V> tuple)
-		{
-			addTuple(tuple, numerators);
-		}
-	};
-
-	/**
-	 * Denominator input port.
-	 */
-	public final transient DefaultInputPort<Map<K, V>> denominator = new DefaultInputPort<Map<K, V>>()
-	{
-		/**
-		 * Added tuple to the denominator hash
-		 */
-		@Override
-		public void process(Map<K, V> tuple)
-		{
-			addTuple(tuple, denominators);
-		}
-	};
-
-	/**
-	 * Quotient output port.
-	 */
-	public final transient DefaultOutputPort<HashMap<K, Double>> quotient = new DefaultOutputPort<HashMap<K, Double>>();
-
-	/**
-	 * Add tuple to nval/dval map.
-	 * 
-	 * @param tuple
-	 *          key/value map on input port.
-	 * @param map
-	 *          key/summed value map.
-	 */
-	public void addTuple(Map<K, V> tuple, Map<K, MutableDouble> map)
-	{
-		for (Map.Entry<K, V> e : tuple.entrySet()) {
-			addEntry(e.getKey(), e.getValue(), map);
-		}
-	}
-
-	/**
-	 * Add/Update entry to key/sum value map.
-	 * 
-	 * @param key
-	 *          name.
-	 * @param value
-	 *          value for key.
-	 * @param map
-	 *          numerator/denominator key/sum map.
-	 */
-	public void addEntry(K key, V value, Map<K, MutableDouble> map)
-	{
-		if (!doprocessKey(key) || (value == null)) {
-			return;
-		}
-		MutableDouble val = map.get(key);
-		if (val == null) {
-			if (countkey) {
-				val = new MutableDouble(1.00);
-			} else {
-				val = new MutableDouble(value.doubleValue());
-			}
-		} else {
-			if (countkey) {
-				val.increment();
-			} else {
-				val.add(value.doubleValue());
-			}
-		}
-		map.put(cloneKey(key), val);
-	}
-
-	/**
-	 * getter for mult_by
-	 * 
-	 * @return mult_by
-	 */
-
-	@Min(0)
-	public int getMult_by()
-	{
-		return mult_by;
-	}
-
-	/**
-	 * getter for countkey
-	 * 
-	 * @return countkey
-	 */
-	public boolean getCountkey()
-	{
-		return countkey;
-	}
-
-	/**
-	 * Setter for mult_by
-	 * 
-	 * @param i
-	 */
-	public void setMult_by(int i)
-	{
-		mult_by = i;
-	}
-
-	/**
-	 * setter for countkey
-	 * 
-	 * @param i
-	 *          sets countkey
-	 */
-	public void setCountkey(boolean i)
-	{
-		countkey = i;
-	}
-
-	/**
-	 * Generates tuples for each key and emits them. Only keys that are in the
-	 * denominator are iterated on If the key is only in the numerator, it gets
-	 * ignored (cannot do divide by 0) Clears internal data
-	 */
-	@Override
-	public void endWindow()
-	{
-		HashMap<K, Double> tuples = new HashMap<K, Double>();
-		for (Map.Entry<K, MutableDouble> e : denominators.entrySet()) {
-			MutableDouble nval = numerators.get(e.getKey());
-			if (nval == null) {
-				tuples.put(e.getKey(), new Double(0.0));
-			} else {
-				tuples.put(e.getKey(), new Double((nval.doubleValue() / e.getValue()
-						.doubleValue()) * mult_by));
-			}
-		}
-		if (!tuples.isEmpty()) {
-			quotient.emit(tuples);
-		}
-		numerators.clear();
-		denominators.clear();
-	}
+  /**
+   * Numerator key/sum value map.
+   */
+  protected HashMap<K, MutableDouble> numerators = new HashMap<K, MutableDouble>();
+
+  /**
+   * Denominator key/sum value map.
+   */
+  protected HashMap<K, MutableDouble> denominators = new HashMap<K, MutableDouble>();
+
+  /**
+   * Count occurrence of keys if set to true.
+   */
+  boolean countkey = false;
+
+  /**
+   * Quotient multiply by value.
+   */
+  int mult_by = 1;
+
+  /**
+   * Numerator input port.
+   */
+  public final transient DefaultInputPort<Map<K, V>> numerator = new DefaultInputPort<Map<K, V>>()
+  {
+    /**
+     * Added tuple to the numerator hash
+     */
+    @Override
+    public void process(Map<K, V> tuple)
+    {
+      addTuple(tuple, numerators);
+    }
+  };
+
+  /**
+   * Denominator input port.
+   */
+  public final transient DefaultInputPort<Map<K, V>> denominator = new DefaultInputPort<Map<K, V>>()
+  {
+    /**
+     * Added tuple to the denominator hash
+     */
+    @Override
+    public void process(Map<K, V> tuple)
+    {
+      addTuple(tuple, denominators);
+    }
+  };
+
+  /**
+   * Quotient output port.
+   */
+  public final transient DefaultOutputPort<HashMap<K, Double>> quotient = new DefaultOutputPort<HashMap<K, Double>>();
+
+  /**
+   * Add tuple to nval/dval map.
+   *
+   * @param tuple
+   *          key/value map on input port.
+   * @param map
+   *          key/summed value map.
+   */
+  public void addTuple(Map<K, V> tuple, Map<K, MutableDouble> map)
+  {
+    for (Map.Entry<K, V> e : tuple.entrySet()) {
+      addEntry(e.getKey(), e.getValue(), map);
+    }
+  }
+
+  /**
+   * Add/Update entry to key/sum value map.
+   *
+   * @param key
+   *          name.
+   * @param value
+   *          value for key.
+   * @param map
+   *          numerator/denominator key/sum map.
+   */
+  public void addEntry(K key, V value, Map<K, MutableDouble> map)
+  {
+    if (!doprocessKey(key) || (value == null)) {
+      return;
+    }
+    MutableDouble val = map.get(key);
+    if (val == null) {
+      if (countkey) {
+        val = new MutableDouble(1.00);
+      } else {
+        val = new MutableDouble(value.doubleValue());
+      }
+    } else {
+      if (countkey) {
+        val.increment();
+      } else {
+        val.add(value.doubleValue());
+      }
+    }
+    map.put(cloneKey(key), val);
+  }
+
+  /**
+   * getter for mult_by
+   *
+   * @return mult_by
+   */
+
+  @Min(0)
+  public int getMult_by()
+  {
+    return mult_by;
+  }
+
+  /**
+   * getter for countkey
+   *
+   * @return countkey
+   */
+  public boolean getCountkey()
+  {
+    return countkey;
+  }
+
+  /**
+   * Setter for mult_by
+   *
+   * @param i
+   */
+  public void setMult_by(int i)
+  {
+    mult_by = i;
+  }
+
+  /**
+   * setter for countkey
+   *
+   * @param i
+   *          sets countkey
+   */
+  public void setCountkey(boolean i)
+  {
+    countkey = i;
+  }
+
+  /**
+   * Generates tuples for each key and emits them. Only keys that are in the
+   * denominator are iterated on If the key is only in the numerator, it gets
+   * ignored (cannot do divide by 0) Clears internal data
+   */
+  @Override
+  public void endWindow()
+  {
+    HashMap<K, Double> tuples = new HashMap<K, Double>();
+    for (Map.Entry<K, MutableDouble> e : denominators.entrySet()) {
+      MutableDouble nval = numerators.get(e.getKey());
+      if (nval == null) {
+        tuples.put(e.getKey(), new Double(0.0));
+      } else {
+        tuples.put(e.getKey(), new Double((nval.doubleValue() / e.getValue()
+            .doubleValue()) * mult_by));
+      }
+    }
+    if (!tuples.isEmpty()) {
+      quotient.emit(tuples);
+    }
+    numerators.clear();
+    denominators.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Range.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Range.java b/library/src/main/java/com/datatorrent/lib/math/Range.java
index 9a7ab7a..ad54d4d 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Range.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Range.java
@@ -43,62 +43,62 @@ import com.datatorrent.lib.util.UnifierRange;
  */
 public class Range<V extends Number> extends BaseNumberValueOperator<V>
 {
-	/**
-	 * Highest value on input port.
-	 */
-	protected V high = null;
+  /**
+   * Highest value on input port.
+   */
+  protected V high = null;
 
-	/**
-	 * Lowest value on input port.
-	 */
-	protected V low = null;
+  /**
+   * Lowest value on input port.
+   */
+  protected V low = null;
 
-	/**
-	 * Input data port.
-	 */
-	public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
-	{
-		/**
-		 * Process each tuple to compute new high and low
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			if ((low == null) || (low.doubleValue() > tuple.doubleValue())) {
-				low = tuple;
-			}
+  /**
+   * Input data port.
+   */
+  public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
+  {
+    /**
+     * Process each tuple to compute new high and low
+     */
+    @Override
+    public void process(V tuple)
+    {
+      if ((low == null) || (low.doubleValue() > tuple.doubleValue())) {
+        low = tuple;
+      }
 
-			if ((high == null) || (high.doubleValue() < tuple.doubleValue())) {
-				high = tuple;
-			}
-		}
-	};
+      if ((high == null) || (high.doubleValue() < tuple.doubleValue())) {
+        high = tuple;
+      }
+    }
+  };
 
-	/**
-	 * Output range port, which emits high low unifier operator.
-	 */
-	public final transient DefaultOutputPort<HighLow<V>> range = new DefaultOutputPort<HighLow<V>>()
-	{
-		@Override
-		public Unifier<HighLow<V>> getUnifier()
-		{
-			return new UnifierRange<V>();
-		}
-	};
+  /**
+   * Output range port, which emits high low unifier operator.
+   */
+  public final transient DefaultOutputPort<HighLow<V>> range = new DefaultOutputPort<HighLow<V>>()
+  {
+    @Override
+    public Unifier<HighLow<V>> getUnifier()
+    {
+      return new UnifierRange<V>();
+    }
+  };
 
-	/**
-	 * Emits the range. If no tuple was received in the window, no emit is done
-	 * Clears the internal data before return
-	 */
-	@Override
-	public void endWindow()
-	{
-		if ((low != null) && (high != null)) {
-			HighLow tuple = new HighLow(getValue(high.doubleValue()),
-					getValue(low.doubleValue()));
-			range.emit(tuple);
-		}
-		high = null;
-		low = null;
-	}
+  /**
+   * Emits the range. If no tuple was received in the window, no emit is done
+   * Clears the internal data before return
+   */
+  @Override
+  public void endWindow()
+  {
+    if ((low != null) && (high != null)) {
+      HighLow tuple = new HighLow(getValue(high.doubleValue()),
+          getValue(low.doubleValue()));
+      range.emit(tuple);
+    }
+    high = null;
+    low = null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/RangeKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/RangeKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/RangeKeyVal.java
index a006415..241f482 100644
--- a/library/src/main/java/com/datatorrent/lib/math/RangeKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/RangeKeyVal.java
@@ -21,15 +21,14 @@ package com.datatorrent.lib.math;
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.StreamCodec;
 import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
 import com.datatorrent.lib.util.HighLow;
 import com.datatorrent.lib.util.KeyValPair;
 import com.datatorrent.lib.util.UnifierKeyValRange;
 
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.StreamCodec;
-
 /**
  *  This operator emits the range for each key at the end of window.
  * <p>
@@ -50,83 +49,82 @@ import com.datatorrent.api.StreamCodec;
  * @tags range, number, comparison, key value
  * @since 0.3.3
  */
-public class RangeKeyVal<K, V extends Number> extends
-		BaseNumberKeyValueOperator<K, V>
+public class RangeKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K, V>
 {
 
-	/**
-	 * key/high value map.
-	 */
-	protected HashMap<K, V> high = new HashMap<K, V>();
+  /**
+   * key/high value map.
+   */
+  protected HashMap<K, V> high = new HashMap<K, V>();
 
-	/**
-	 * key/low value map.
-	 */
-	protected HashMap<K, V> low = new HashMap<K, V>();
+  /**
+   * key/low value map.
+   */
+  protected HashMap<K, V> low = new HashMap<K, V>();
 
-	/**
-	 *  Input port that takes a key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		/**
-		 * Process each key and computes new high and low.
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			K key = tuple.getKey();
-			if (!doprocessKey(key) || (tuple.getValue() == null)) {
-				return;
-			}
-			V val = low.get(key);
-			V eval = tuple.getValue();
-			if ((val == null) || (val.doubleValue() > eval.doubleValue())) {
-				low.put(cloneKey(key), eval);
-			}
+  /**
+   *  Input port that takes a key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    /**
+     * Process each key and computes new high and low.
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      K key = tuple.getKey();
+      if (!doprocessKey(key) || (tuple.getValue() == null)) {
+        return;
+      }
+      V val = low.get(key);
+      V eval = tuple.getValue();
+      if ((val == null) || (val.doubleValue() > eval.doubleValue())) {
+        low.put(cloneKey(key), eval);
+      }
 
-			val = high.get(key);
-			if ((val == null) || (val.doubleValue() < eval.doubleValue())) {
-				high.put(cloneKey(key), eval);
-			}
-		}
+      val = high.get(key);
+      if ((val == null) || (val.doubleValue() < eval.doubleValue())) {
+        high.put(cloneKey(key), eval);
+      }
+    }
 
-		@Override
-		public StreamCodec<KeyValPair<K, V>> getStreamCodec()
-		{
-			return getKeyValPairStreamCodec();
-		}
-	};
+    @Override
+    public StreamCodec<KeyValPair<K, V>> getStreamCodec()
+    {
+      return getKeyValPairStreamCodec();
+    }
+  };
 
-	/**
-	 * Range output port to send out the high low range.
-	 */
-	public final transient DefaultOutputPort<KeyValPair<K, HighLow<V>>> range = new DefaultOutputPort<KeyValPair<K, HighLow<V>>>()
-	{
-		@Override
-		public Unifier<KeyValPair<K, HighLow<V>>> getUnifier()
-		{
-			return new UnifierKeyValRange<K,V>();
-		}
-	};
+  /**
+   * Range output port to send out the high low range.
+   */
+  public final transient DefaultOutputPort<KeyValPair<K, HighLow<V>>> range = new DefaultOutputPort<KeyValPair<K, HighLow<V>>>()
+  {
+    @Override
+    public Unifier<KeyValPair<K, HighLow<V>>> getUnifier()
+    {
+      return new UnifierKeyValRange<K,V>();
+    }
+  };
 
-	/**
-	 * Emits range for each key. If no data is received, no emit is done Clears
-	 * the internal data before return
-	 */
-	@Override
-	public void endWindow()
-	{
-		for (Map.Entry<K, V> e : high.entrySet()) {
-			range.emit(new KeyValPair<K, HighLow<V>>(e.getKey(), new HighLow(e
-					.getValue(), low.get(e.getKey()))));
-		}
-		clearCache();
-	}
+  /**
+   * Emits range for each key. If no data is received, no emit is done Clears
+   * the internal data before return
+   */
+  @Override
+  public void endWindow()
+  {
+    for (Map.Entry<K, V> e : high.entrySet()) {
+      range.emit(new KeyValPair<K, HighLow<V>>(e.getKey(), new HighLow(e
+          .getValue(), low.get(e.getKey()))));
+    }
+    clearCache();
+  }
 
-	public void clearCache()
-	{
-		high.clear();
-		low.clear();
-	}
+  public void clearCache()
+  {
+    high.clear();
+    low.clear();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java b/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
index 24f09f9..286d72e 100644
--- a/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
+++ b/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Calculate the running average of the input numbers and emit it at the end of the window. 
@@ -46,70 +46,70 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 @OperatorAnnotation(partitionable = false)
 public class RunningAverage extends BaseOperator
 {
-	/**
-	 * Computed average.
-	 */
-	double average;
+  /**
+   * Computed average.
+   */
+  double average;
 
-	/**
-	 * Number of values on input port.
-	 */
-	long count;
+  /**
+   * Number of values on input port.
+   */
+  long count;
 
-	/**
-	 * Input number port.
-	 */
-	public final transient DefaultInputPort<Number> input = new DefaultInputPort<Number>()
-	{
-		@Override
-		public void process(Number tuple)
-		{
-			double sum = (count * average) + tuple.doubleValue();
-			count++;
-			average = sum / count;
-		}
-	};
+  /**
+   * Input number port.
+   */
+  public final transient DefaultInputPort<Number> input = new DefaultInputPort<Number>()
+  {
+    @Override
+    public void process(Number tuple)
+    {
+      double sum = (count * average) + tuple.doubleValue();
+      count++;
+      average = sum / count;
+    }
+  };
 
-	/**
-	 * Double average output port.
-	 */
-	public final transient DefaultOutputPort<Double> doubleAverage = new DefaultOutputPort<Double>();
+  /**
+   * Double average output port.
+   */
+  public final transient DefaultOutputPort<Double> doubleAverage = new DefaultOutputPort<Double>();
 
-	/**
-	 * Float average output port.
-	 */
-	public final transient DefaultOutputPort<Float> floatAverage = new DefaultOutputPort<Float>();
+  /**
+   * Float average output port.
+   */
+  public final transient DefaultOutputPort<Float> floatAverage = new DefaultOutputPort<Float>();
 
-	/**
-	 * Long average output port.
-	 */
-	public final transient DefaultOutputPort<Long> longAverage = new DefaultOutputPort<Long>();
+  /**
+   * Long average output port.
+   */
+  public final transient DefaultOutputPort<Long> longAverage = new DefaultOutputPort<Long>();
 
-	/**
-	 * Integer average output port.
-	 */
-	public final transient DefaultOutputPort<Integer> integerAverage = new DefaultOutputPort<Integer>();
+  /**
+   * Integer average output port.
+   */
+  public final transient DefaultOutputPort<Integer> integerAverage = new DefaultOutputPort<Integer>();
 
-	/**
-	 * End window operator override.
-	 */
-	@Override
-	public void endWindow()
-	{
-		if (doubleAverage.isConnected()) {
-			doubleAverage.emit(average);
-		}
+  /**
+   * End window operator override.
+   */
+  @Override
+  public void endWindow()
+  {
+    if (doubleAverage.isConnected()) {
+      doubleAverage.emit(average);
+    }
 
-		if (floatAverage.isConnected()) {
-			floatAverage.emit((float) average);
-		}
+    if (floatAverage.isConnected()) {
+      floatAverage.emit((float)average);
+    }
 
-		if (longAverage.isConnected()) {
-			longAverage.emit((long) average);
-		}
+    if (longAverage.isConnected()) {
+      longAverage.emit((long)average);
+    }
 
-		if (integerAverage.isConnected()) {
-			integerAverage.emit((int) average);
-		}
-	}
+    if (integerAverage.isConnected()) {
+      integerAverage.emit((int)average);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Sigma.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Sigma.java b/library/src/main/java/com/datatorrent/lib/math/Sigma.java
index 7355238..6bfb9cf 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Sigma.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Sigma.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.api.annotation.OperatorAnnotation;
-
 import java.util.Collection;
 
+import com.datatorrent.api.annotation.OperatorAnnotation;
+
 /**
  * Adds incoming tuple to the state and emits the result of each addition on the respective ports.
  * <p>
@@ -47,27 +47,27 @@ import java.util.Collection;
 @OperatorAnnotation(partitionable = false)
 public class Sigma<T extends Number> extends AbstractAggregateCalc<T>
 {
-	@Override
-	public long aggregateLongs(Collection<T> collection)
-	{
-		long l = 0;
+  @Override
+  public long aggregateLongs(Collection<T> collection)
+  {
+    long l = 0;
 
-		for (Number n : collection) {
-			l += n.longValue();
-		}
+    for (Number n : collection) {
+      l += n.longValue();
+    }
 
-		return l;
-	}
+    return l;
+  }
 
-	@Override
-	public double aggregateDoubles(Collection<T> collection)
-	{
-		double d = 0;
+  @Override
+  public double aggregateDoubles(Collection<T> collection)
+  {
+    double d = 0;
 
-		for (Number n : collection) {
-			d += n.doubleValue();
-		}
+    for (Number n : collection) {
+      d += n.doubleValue();
+    }
 
-		return d;
-	}
+    return d;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/Sum.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Sum.java b/library/src/main/java/com/datatorrent/lib/math/Sum.java
index 38be2fc..0f5e64f 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Sum.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Sum.java
@@ -56,204 +56,204 @@ import com.datatorrent.lib.util.UnifierSumNumber;
  * @since 0.3.3
  */
 public class Sum<V extends Number> extends BaseNumberValueOperator<V> implements
-		Unifier<V>
+    Unifier<V>
 {
-	/**
-	 * Sum value.
-	 */
-	protected double sums = 0;
+  /**
+   * Sum value.
+   */
+  protected double sums = 0;
 
-	/**
-	 * Input tuple processed flag.
-	 */
-	protected boolean tupleAvailable = false;
+  /**
+   * Input tuple processed flag.
+   */
+  protected boolean tupleAvailable = false;
 
-	/**
-	 * Accumulate sum flag.
-	 */
-	protected boolean cumulative = false;
+  /**
+   * Accumulate sum flag.
+   */
+  protected boolean cumulative = false;
 
-	/**
-	 * Input port to receive data.&nbsp; It computes sum and count for each tuple.
-	 */
-	public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
-	{
-		/**
-		 * Computes sum and count with each tuple
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			Sum.this.process(tuple);
-			tupleAvailable = true;
-		}
-	};
+  /**
+   * Input port to receive data.&nbsp; It computes sum and count for each tuple.
+   */
+  public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
+  {
+    /**
+     * Computes sum and count with each tuple
+     */
+    @Override
+    public void process(V tuple)
+    {
+      Sum.this.process(tuple);
+      tupleAvailable = true;
+    }
+  };
 
-	/**
-	 * Unifier process override.
-	 */
-	@Override
-	public void process(V tuple)
-	{
-		sums += tuple.doubleValue();
-		tupleAvailable = true; // also need to set here for Unifier
-	}
+  /**
+   * Unifier process override.
+   */
+  @Override
+  public void process(V tuple)
+  {
+    sums += tuple.doubleValue();
+    tupleAvailable = true; // also need to set here for Unifier
+  }
 
-	/**
-	 * Output sum port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<V> sum = new DefaultOutputPort<V>()
-	{
-		@Override
-		public Unifier<V> getUnifier()
-		{
-			UnifierSumNumber<V> ret = new UnifierSumNumber<V>();
-			ret.setVType(getType());
-			return ret;
-		}
-	};
+  /**
+   * Output sum port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<V> sum = new DefaultOutputPort<V>()
+  {
+    @Override
+    public Unifier<V> getUnifier()
+    {
+      UnifierSumNumber<V> ret = new UnifierSumNumber<V>();
+      ret.setVType(getType());
+      return ret;
+    }
+  };
 
-	/**
-	 * Output double sum port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Double> sumDouble = new DefaultOutputPort<Double>()
-	{
-		@Override
-		public Unifier<Double> getUnifier()
-		{
-			UnifierSumNumber<Double> ret = new UnifierSumNumber<Double>();
-			ret.setType(Double.class);
-			return ret;
-		}
-	};
+  /**
+   * Output double sum port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Double> sumDouble = new DefaultOutputPort<Double>()
+  {
+    @Override
+    public Unifier<Double> getUnifier()
+    {
+      UnifierSumNumber<Double> ret = new UnifierSumNumber<Double>();
+      ret.setType(Double.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Output integer sum port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Integer> sumInteger = new DefaultOutputPort<Integer>()
-	{
-		@Override
-		public Unifier<Integer> getUnifier()
-		{
-			UnifierSumNumber<Integer> ret = new UnifierSumNumber<Integer>();
-			ret.setType(Integer.class);
-			return ret;
-		}
-	};
+  /**
+   * Output integer sum port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Integer> sumInteger = new DefaultOutputPort<Integer>()
+  {
+    @Override
+    public Unifier<Integer> getUnifier()
+    {
+      UnifierSumNumber<Integer> ret = new UnifierSumNumber<Integer>();
+      ret.setType(Integer.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Output Long sum port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Long> sumLong = new DefaultOutputPort<Long>()
-	{
-		@Override
-		public Unifier<Long> getUnifier()
-		{
-			UnifierSumNumber<Long> ret = new UnifierSumNumber<Long>();
-			ret.setType(Long.class);
-			return ret;
-		}
-	};
+  /**
+   * Output Long sum port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Long> sumLong = new DefaultOutputPort<Long>()
+  {
+    @Override
+    public Unifier<Long> getUnifier()
+    {
+      UnifierSumNumber<Long> ret = new UnifierSumNumber<Long>();
+      ret.setType(Long.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Output short sum port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Short> sumShort = new DefaultOutputPort<Short>()
-	{
-		@Override
-		public Unifier<Short> getUnifier()
-		{
-			UnifierSumNumber<Short> ret = new UnifierSumNumber<Short>();
-			ret.setType(Short.class);
-			return ret;
-		}
-	};
+  /**
+   * Output short sum port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Short> sumShort = new DefaultOutputPort<Short>()
+  {
+    @Override
+    public Unifier<Short> getUnifier()
+    {
+      UnifierSumNumber<Short> ret = new UnifierSumNumber<Short>();
+      ret.setType(Short.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Output float sum port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Float> sumFloat = new DefaultOutputPort<Float>()
-	{
-		@Override
-		public Unifier<Float> getUnifier()
-		{
-			UnifierSumNumber<Float> ret = new UnifierSumNumber<Float>();
-			ret.setType(Float.class);
-			return ret;
-		}
-	};
+  /**
+   * Output float sum port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Float> sumFloat = new DefaultOutputPort<Float>()
+  {
+    @Override
+    public Unifier<Float> getUnifier()
+    {
+      UnifierSumNumber<Float> ret = new UnifierSumNumber<Float>();
+      ret.setType(Float.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Redis server output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<Map<Integer, Integer>> redisport = new DefaultOutputPort<Map<Integer, Integer>>();
+  /**
+   * Redis server output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<Map<Integer, Integer>> redisport = new DefaultOutputPort<Map<Integer, Integer>>();
 
-	/**
-	 * Check if sum has to be cumulative.
-	 * 
-	 * @return cumulative flag
-	 */
-	public boolean isCumulative()
-	{
-		return cumulative;
-	}
+  /**
+   * Check if sum has to be cumulative.
+   *
+   * @return cumulative flag
+   */
+  public boolean isCumulative()
+  {
+    return cumulative;
+  }
 
-	/**
-	 * Set cumulative flag.
-	 * 
-	 * @param cumulative
-	 *          flag
-	 */
-	public void setCumulative(boolean cumulative)
-	{
-		this.cumulative = cumulative;
-	}
+  /**
+   * Set cumulative flag.
+   *
+   * @param cumulative
+   *          flag
+   */
+  public void setCumulative(boolean cumulative)
+  {
+    this.cumulative = cumulative;
+  }
 
-	/**
-	 * Emits sum and count if ports are connected
-	 */
-	@Override
-	public void endWindow()
-	{
-		if (doEmit()) {
-			sum.emit(getValue(sums));
-			sumDouble.emit(sums);
-			sumInteger.emit((int) sums);
-			sumLong.emit((long) sums);
-			sumShort.emit((short) sums);
-			sumFloat.emit((float) sums);
-			tupleAvailable = false;
-			Map<Integer, Integer> redis = new HashMap<Integer, Integer>();
-			redis.put(1, (int) sums);
-			redisport.emit(redis);
-		}
-		clearCache();
-	}
+  /**
+   * Emits sum and count if ports are connected
+   */
+  @Override
+  public void endWindow()
+  {
+    if (doEmit()) {
+      sum.emit(getValue(sums));
+      sumDouble.emit(sums);
+      sumInteger.emit((int)sums);
+      sumLong.emit((long)sums);
+      sumShort.emit((short)sums);
+      sumFloat.emit((float)sums);
+      tupleAvailable = false;
+      Map<Integer, Integer> redis = new HashMap<Integer, Integer>();
+      redis.put(1, (int)sums);
+      redisport.emit(redis);
+    }
+    clearCache();
+  }
 
-	/**
-	 * Clears the cache making this operator stateless on window boundary
-	 */
-	private void clearCache()
-	{
-		if (!cumulative) {
-			sums = 0;
-		}
-	}
+  /**
+   * Clears the cache making this operator stateless on window boundary
+   */
+  private void clearCache()
+  {
+    if (!cumulative) {
+      sums = 0;
+    }
+  }
 
-	/**
-	 * Decides whether emit has to be done in this window on port "sum"
-	 * 
-	 * @return true is sum port is connected
-	 */
-	private boolean doEmit()
-	{
-		return tupleAvailable;
-	}
+  /**
+   * Decides whether emit has to be done in this window on port "sum"
+   *
+   * @return true if sum port is connected
+   */
+  private boolean doEmit()
+  {
+    return tupleAvailable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/SumCountMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/SumCountMap.java b/library/src/main/java/com/datatorrent/lib/math/SumCountMap.java
index 76242f4..c2d8465 100644
--- a/library/src/main/java/com/datatorrent/lib/math/SumCountMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/SumCountMap.java
@@ -18,16 +18,18 @@
  */
 package com.datatorrent.lib.math;
 
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.lang.mutable.MutableDouble;
+import org.apache.commons.lang.mutable.MutableInt;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
 import com.datatorrent.lib.util.UnifierHashMapInteger;
 import com.datatorrent.lib.util.UnifierHashMapSumKeys;
-import java.util.HashMap;
-import java.util.Map;
-import org.apache.commons.lang.mutable.MutableDouble;
-import org.apache.commons.lang.mutable.MutableInt;
 
 /**
  * Emits the sum and count of values for each key at the end of window.
@@ -58,244 +60,244 @@ import org.apache.commons.lang.mutable.MutableInt;
  * @since 0.3.3
  */
 public class SumCountMap<K, V extends Number> extends
-		BaseNumberKeyValueOperator<K, V>
+    BaseNumberKeyValueOperator<K, V>
 {
-	/**
-	 * Key/double sum map.
-	 */
-	protected HashMap<K, MutableDouble> sums = new HashMap<K, MutableDouble>();
+  /**
+   * Key/double sum map.
+   */
+  protected HashMap<K, MutableDouble> sums = new HashMap<K, MutableDouble>();
+
+  /**
+   * Key/integer sum map.
+   */
+  protected HashMap<K, MutableInt> counts = new HashMap<K, MutableInt>();
 
-	/**
-	 * Key/integer sum map.
-	 */
-	protected HashMap<K, MutableInt> counts = new HashMap<K, MutableInt>();
+  /**
+   * Cumulative sum flag.
+   */
+  protected boolean cumulative = false;
 
-	/**
-	 * Cumulative sum flag.
-	 */
-	protected boolean cumulative = false;
+  /**
+   * Input port that takes a map.&nbsp; It adds the values for each key and counts the number of occurrences for each key.
+   */
+  public final transient DefaultInputPort<Map<K, V>> data = new DefaultInputPort<Map<K, V>>()
+  {
+    /**
+     * For each tuple (a HashMap of keys,val pairs) Adds the values for each
+     * key, Counts the number of occurrences of each key
+     */
+    @Override
+    public void process(Map<K, V> tuple)
+    {
+      for (Map.Entry<K, V> e : tuple.entrySet()) {
+        K key = e.getKey();
+        if (!doprocessKey(key)) {
+          continue;
+        }
+        if (sum.isConnected()) {
+          MutableDouble val = sums.get(key);
+          if (val == null) {
+            val = new MutableDouble(e.getValue().doubleValue());
+          } else {
+            val.add(e.getValue().doubleValue());
+          }
+          sums.put(cloneKey(key), val);
+        }
+        if (SumCountMap.this.count.isConnected()) {
+          MutableInt count = counts.get(key);
+          if (count == null) {
+            count = new MutableInt(0);
+            counts.put(cloneKey(key), count);
+          }
+          count.increment();
+        }
+      }
+    }
+  };
 
-	/**
-	 * Input port that takes a map.&nbsp; It adds the values for each key and counts the number of occurrences for each key.
-	 */
-	public final transient DefaultInputPort<Map<K, V>> data = new DefaultInputPort<Map<K, V>>()
-	{
-		/**
-		 * For each tuple (a HashMap of keys,val pairs) Adds the values for each
-		 * key, Counts the number of occurrences of each key
-		 */
-		@Override
-		public void process(Map<K, V> tuple)
-		{
-			for (Map.Entry<K, V> e : tuple.entrySet()) {
-				K key = e.getKey();
-				if (!doprocessKey(key)) {
-					continue;
-				}
-				if (sum.isConnected()) {
-					MutableDouble val = sums.get(key);
-					if (val == null) {
-						val = new MutableDouble(e.getValue().doubleValue());
-					} else {
-						val.add(e.getValue().doubleValue());
-					}
-					sums.put(cloneKey(key), val);
-				}
-				if (SumCountMap.this.count.isConnected()) {
-					MutableInt count = counts.get(key);
-					if (count == null) {
-						count = new MutableInt(0);
-						counts.put(cloneKey(key), count);
-					}
-					count.increment();
-				}
-			}
-		}
-	};
+  /**
+   * Key,sum map output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, V>> sum = new DefaultOutputPort<HashMap<K, V>>()
+  {
+    @Override
+    public Unifier<HashMap<K, V>> getUnifier()
+    {
+      return new UnifierHashMapSumKeys<K, V>();
+    }
+  };
 
-	/**
-	 * Key,sum map output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, V>> sum = new DefaultOutputPort<HashMap<K, V>>()
-	{
-		@Override
-		public Unifier<HashMap<K, V>> getUnifier()
-		{
-			return new UnifierHashMapSumKeys<K, V>();
-		}
-	};
+  /**
+   * Key,double sum map output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, Double>> sumDouble = new DefaultOutputPort<HashMap<K, Double>>()
+  {
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public Unifier<HashMap<K, Double>> getUnifier()
+    {
+      UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Double>();
+      ret.setType(Double.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Key,double sum map output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, Double>> sumDouble = new DefaultOutputPort<HashMap<K, Double>>()
-	{
-		@SuppressWarnings({ "rawtypes", "unchecked" })
-		@Override
-		public Unifier<HashMap<K, Double>> getUnifier()
-		{
-			UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Double>();
-			ret.setType(Double.class);
-			return ret;
-		}
-	};
+  /**
+   * Key,integer sum output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, Integer>> sumInteger = new DefaultOutputPort<HashMap<K, Integer>>()
+  {
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public Unifier<HashMap<K, Integer>> getUnifier()
+    {
+      UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Integer>();
+      ret.setType(Integer.class);
+      return ret;
+    }
+  };
 
-	/**
-	 * Key,integer sum output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, Integer>> sumInteger = new DefaultOutputPort<HashMap<K, Integer>>()
-	{
-		@SuppressWarnings({ "rawtypes", "unchecked" })
-		@Override
-		public Unifier<HashMap<K, Integer>> getUnifier()
-		{
-			UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Integer>();
-			ret.setType(Integer.class);
-			return ret;
-		}
-	};
 
-	
         /**
-	 * Key,long sum output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, Long>> sumLong = new DefaultOutputPort<HashMap<K, Long>>()
-	{
-		@SuppressWarnings({ "rawtypes", "unchecked" })
-		@Override
-		public Unifier<HashMap<K, Long>> getUnifier()
-		{
-			UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Long>();
-			ret.setType(Long.class);
-			return ret;
-		}
-	};
+   * Key,long sum output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, Long>> sumLong = new DefaultOutputPort<HashMap<K, Long>>()
+  {
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public Unifier<HashMap<K, Long>> getUnifier()
+    {
+      UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Long>();
+      ret.setType(Long.class);
+      return ret;
+    }
+  };
         
         /**
-	 * Key,short sum output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, Short>> sumShort = new DefaultOutputPort<HashMap<K, Short>>()
-	{
-		@SuppressWarnings({ "rawtypes", "unchecked" })
-		@Override
-		public Unifier<HashMap<K, Short>> getUnifier()
-		{
-			UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Short>();
-			ret.setType(Short.class);
-			return ret;
-		}
-	};
+   * Key,short sum output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, Short>> sumShort = new DefaultOutputPort<HashMap<K, Short>>()
+  {
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public Unifier<HashMap<K, Short>> getUnifier()
+    {
+      UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Short>();
+      ret.setType(Short.class);
+      return ret;
+    }
+  };
         
         /**
-	 * Key,float sum output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, Float>> sumFloat = new DefaultOutputPort<HashMap<K, Float>>()
-	{
-		@SuppressWarnings({ "rawtypes", "unchecked" })
-		@Override
-		public Unifier<HashMap<K, Float>> getUnifier()
-		{
-			UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Float>();
-			ret.setType(Float.class);
-			return ret;
-		}
-	};
+   * Key,float sum output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, Float>> sumFloat = new DefaultOutputPort<HashMap<K, Float>>()
+  {
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public Unifier<HashMap<K, Float>> getUnifier()
+    {
+      UnifierHashMapSumKeys ret = new UnifierHashMapSumKeys<K, Float>();
+      ret.setType(Float.class);
+      return ret;
+    }
+  };
         
         /**
-	 * Key,integer sum output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<HashMap<K, Integer>> count = new DefaultOutputPort<HashMap<K, Integer>>()
-	{
-		@Override
-		public Unifier<HashMap<K, Integer>> getUnifier()
-		{
-			return new UnifierHashMapInteger<K>();
-		}
-	};
+   * Key,integer sum output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<HashMap<K, Integer>> count = new DefaultOutputPort<HashMap<K, Integer>>()
+  {
+    @Override
+    public Unifier<HashMap<K, Integer>> getUnifier()
+    {
+      return new UnifierHashMapInteger<K>();
+    }
+  };
 
-	/**
-	 * Get cumulative flag.
-	 * 
-	 * @return cumulative flag
-	 */
-	public boolean isCumulative()
-	{
-		return cumulative;
-	}
+  /**
+   * Get cumulative flag.
+   *
+   * @return cumulative flag
+   */
+  public boolean isCumulative()
+  {
+    return cumulative;
+  }
 
-	/**
-	 * set cumulative flag.
-	 * 
-	 * @param cumulative
-	 *          input flag
-	 */
-	public void setCumulative(boolean cumulative)
-	{
-		this.cumulative = cumulative;
-	}
+  /**
+   * set cumulative flag.
+   *
+   * @param cumulative
+   *          input flag
+   */
+  public void setCumulative(boolean cumulative)
+  {
+    this.cumulative = cumulative;
+  }
 
-	/**
-	 * Emits on all ports that are connected. Data is precomputed during process
-	 * on input port endWindow just emits it for each key Clears the internal data
-	 * before return
-	 */
-	@Override
-	public void endWindow()
-	{
+  /**
+   * Emits on all ports that are connected. Data is precomputed during process
+   * on input port endWindow just emits it for each key Clears the internal data
+   * before return
+   */
+  @Override
+  public void endWindow()
+  {
 
-		// Should allow users to send each key as a separate tuple to load balance
-		// This is an aggregate node, so load balancing would most likely not be
-		// needed
+    // Should allow users to send each key as a separate tuple to load balance
+    // This is an aggregate node, so load balancing would most likely not be
+    // needed
 
-		HashMap<K, V> tuples = new HashMap<K, V>();
-		HashMap<K, Integer> ctuples = new HashMap<K, Integer>();
-		HashMap<K, Double> dtuples = new HashMap<K, Double>();
-		HashMap<K, Integer> ituples = new HashMap<K, Integer>();
-		HashMap<K, Float> ftuples = new HashMap<K, Float>();
-		HashMap<K, Long> ltuples = new HashMap<K, Long>();
-		HashMap<K, Short> stuples = new HashMap<K, Short>();
+    HashMap<K, V> tuples = new HashMap<K, V>();
+    HashMap<K, Integer> ctuples = new HashMap<K, Integer>();
+    HashMap<K, Double> dtuples = new HashMap<K, Double>();
+    HashMap<K, Integer> ituples = new HashMap<K, Integer>();
+    HashMap<K, Float> ftuples = new HashMap<K, Float>();
+    HashMap<K, Long> ltuples = new HashMap<K, Long>();
+    HashMap<K, Short> stuples = new HashMap<K, Short>();
 
-		for (Map.Entry<K, MutableDouble> e : sums.entrySet()) {
-			K key = e.getKey();
-			MutableDouble val = e.getValue();
-			tuples.put(key, getValue(val.doubleValue()));
-			dtuples.put(key, val.doubleValue());
-			ituples.put(key, val.intValue());
-			ftuples.put(key, val.floatValue());
-			ltuples.put(key, val.longValue());
-			stuples.put(key, val.shortValue());
-			// ctuples.put(key, counts.get(e.getKey()).toInteger());
-			MutableInt c = counts.get(e.getKey());
-			if (c != null) {
-				ctuples.put(key, c.toInteger());
-			}
-		}
+    for (Map.Entry<K, MutableDouble> e : sums.entrySet()) {
+      K key = e.getKey();
+      MutableDouble val = e.getValue();
+      tuples.put(key, getValue(val.doubleValue()));
+      dtuples.put(key, val.doubleValue());
+      ituples.put(key, val.intValue());
+      ftuples.put(key, val.floatValue());
+      ltuples.put(key, val.longValue());
+      stuples.put(key, val.shortValue());
+      // ctuples.put(key, counts.get(e.getKey()).toInteger());
+      MutableInt c = counts.get(e.getKey());
+      if (c != null) {
+        ctuples.put(key, c.toInteger());
+      }
+    }
 
-		sum.emit(tuples);
-		sumDouble.emit(dtuples);
-		sumInteger.emit(ituples);
-		sumLong.emit(ltuples);
-		sumShort.emit(stuples);
-		sumFloat.emit(ftuples);
-		count.emit(ctuples);
-		clearCache();
-	}
+    sum.emit(tuples);
+    sumDouble.emit(dtuples);
+    sumInteger.emit(ituples);
+    sumLong.emit(ltuples);
+    sumShort.emit(stuples);
+    sumFloat.emit(ftuples);
+    count.emit(ctuples);
+    clearCache();
+  }
 
-	/**
-	 * Clear sum maps.
-	 */
-	private void clearCache()
-	{
-		if (!cumulative) {
-			sums.clear();
-			counts.clear();
-		}
-	}
+  /**
+   * Clear sum maps.
+   */
+  private void clearCache()
+  {
+    if (!cumulative) {
+      sums.clear();
+      counts.clear();
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/SumKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/SumKeyVal.java b/library/src/main/java/com/datatorrent/lib/math/SumKeyVal.java
index d79a490..99e2492 100644
--- a/library/src/main/java/com/datatorrent/lib/math/SumKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/math/SumKeyVal.java
@@ -23,13 +23,12 @@ import java.util.Map;
 
 import org.apache.commons.lang.mutable.MutableDouble;
 
-import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
-import com.datatorrent.lib.util.KeyValPair;
-
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.StreamCodec;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
+import com.datatorrent.lib.util.KeyValPair;
 
 /**
  * Emits the sum of values for each key at the end of window.
@@ -93,8 +92,7 @@ public class SumKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K
       SumEntry val = sums.get(key);
       if (val == null) {
         val = new SumEntry(new MutableDouble(tuple.getValue().doubleValue()), true);
-      }
-      else {
+      } else {
         val.sum.add(tuple.getValue().doubleValue());
         val.changed = true;
       }
@@ -194,12 +192,11 @@ public class SumKeyVal<K, V extends Number> extends BaseNumberKeyValueOperator<K
   public void clearCache()
   {
     if (cumulative) {
-      for (Map.Entry<K, SumEntry> e: sums.entrySet()) {
+      for (Map.Entry<K, SumEntry> e : sums.entrySet()) {
         SumEntry val = e.getValue();
         val.changed = false;
       }
-    }
-    else {
+    } else {
       sums.clear();
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java b/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
index adc77fa..7f36ef5 100644
--- a/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
+++ b/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
@@ -18,12 +18,13 @@
  */
 package com.datatorrent.lib.math;
 
-import com.datatorrent.api.DefaultOutputPort;
-import org.xml.sax.InputSource;
-
 import java.io.StringReader;
 import java.util.List;
 
+import org.xml.sax.InputSource;
+
+import com.datatorrent.api.DefaultOutputPort;
+
 /**
  * An implementation of the AbstractXmlKeyValueCartesianProduct operator that takes in the xml document
  * as a String input and outputs the cartesian product as Strings.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindow.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindow.java b/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindow.java
index 231285f..7217086 100644
--- a/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindow.java
+++ b/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindow.java
@@ -22,9 +22,9 @@ import java.util.ArrayList;
 
 import javax.validation.constraints.Min;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  *
@@ -47,108 +47,108 @@ public abstract class AbstractSlidingWindow<T, S> extends BaseOperator
         /**
          * Input port for getting incoming data.
          */
-	public final transient DefaultInputPort<T> data = new DefaultInputPort<T>()
-	{
-		@Override
-		public void process(T tuple)
-		{
-			processDataTuple(tuple);
-		}
-	};
-
-	protected ArrayList<S> states = null;
-
-	protected S lastExpiredWindowState = null;
-
-	protected int currentCursor = -1;
-
-	@Min(2)
-	int windowSize = 2;
-
-	/**
-	 * getter function for n (number of previous window states
-	 *
-	 * @return n
-	 */
-	@Min(2)
-	public int getWindowSize()
-	{
-		return windowSize;
-	}
-
-	/**
-	 * setter for windowSize
-	 *
-	 * @param i
-	 */
-	public void setWindowSize(int windowSize)
-	{
-		this.windowSize = windowSize;
-	}
-
-	abstract protected void processDataTuple(T tuple);
-
-	/**
-	 * Implement this method to create the state object needs to be kept in the sliding window
-	 *
-	 * @return the state of current streaming window
-	 */
-	public abstract S createWindowState();
-
-	/**
-	 * Get the Streaming window state in it's coming the order start from 0
-	 *
-	 * @param i
-	 *   0 the state of the first coming streaming window
-	 *   -1 the state of the last expired streaming window
-	 * @return State of the streaming window
-	 * @throws ArrayIndexOutOfBoundsException if i >= sliding window size
-	 */
-	public S getStreamingWindowState(int i)
-	{
-	  if(i == -1){
-	    return lastExpiredWindowState;
-	  }
-		if (i >= getWindowSize()) {
-			throw new ArrayIndexOutOfBoundsException();
-		}
-		int index = (currentCursor + 1 + i) % windowSize ;
-		return states.get(index);
-	}
-
-	/**
-	 * Moves states by 1 and sets current state to null. If you override
-	 * beginWindow, you must call super.beginWindow(windowId) to ensure proper
-	 * operator behavior.
-	 *
-	 * @param windowId
-	 */
-	@Override
-	public void beginWindow(long windowId)
-	{
-	  // move currentCursor 1 position
-		currentCursor = (currentCursor + 1) % windowSize;
-		// expire the state at the first position which is the state of the streaming window moving out of the current application window
-		lastExpiredWindowState = states.get(currentCursor);
-
-		states.set(currentCursor, createWindowState());
-
-	}
-
-	/**
-	 * Sets up internal state structure
-	 *
-	 * @param context
-	 */
-	@Override
-	public void setup(OperatorContext context)
-	{
-	  super.setup(context);
-		states = new ArrayList<S>(windowSize);
-		//initialize the sliding window state to null
-		for (int i = 0; i < windowSize; i++) {
-			states.add(null);
-		}
-		currentCursor = -1;
-	}
+  public final transient DefaultInputPort<T> data = new DefaultInputPort<T>()
+  {
+    @Override
+    public void process(T tuple)
+    {
+      processDataTuple(tuple);
+    }
+  };
+
+  protected ArrayList<S> states = null;
+
+  protected S lastExpiredWindowState = null;
+
+  protected int currentCursor = -1;
+
+  @Min(2)
+  int windowSize = 2;
+
+  /**
+   * getter function for n (number of previous window states)
+   *
+   * @return n
+   */
+  @Min(2)
+  public int getWindowSize()
+  {
+    return windowSize;
+  }
+
+  /**
+   * setter for windowSize
+   *
+   * @param windowSize
+   */
+  public void setWindowSize(int windowSize)
+  {
+    this.windowSize = windowSize;
+  }
+
+  protected abstract void processDataTuple(T tuple);
+
+  /**
+   * Implement this method to create the state object needs to be kept in the sliding window
+   *
+   * @return the state of current streaming window
+   */
+  public abstract S createWindowState();
+
+  /**
+   * Get the streaming window state, in arrival order starting from 0
+   *
+   * @param i
+   *   0 the state of the first coming streaming window
+   *   -1 the state of the last expired streaming window
+   * @return State of the streaming window
+   * @throws ArrayIndexOutOfBoundsException if i >= sliding window size
+   */
+  public S getStreamingWindowState(int i)
+  {
+    if (i == -1) {
+      return lastExpiredWindowState;
+    }
+    if (i >= getWindowSize()) {
+      throw new ArrayIndexOutOfBoundsException();
+    }
+    int index = (currentCursor + 1 + i) % windowSize;
+    return states.get(index);
+  }
+
+  /**
+   * Moves states by 1 and sets current state to null. If you override
+   * beginWindow, you must call super.beginWindow(windowId) to ensure proper
+   * operator behavior.
+   *
+   * @param windowId
+   */
+  @Override
+  public void beginWindow(long windowId)
+  {
+    // move currentCursor 1 position
+    currentCursor = (currentCursor + 1) % windowSize;
+    // expire the state at the first position which is the state of the streaming window moving out of the current application window
+    lastExpiredWindowState = states.get(currentCursor);
+
+    states.set(currentCursor, createWindowState());
+
+  }
+
+  /**
+   * Sets up internal state structure
+   *
+   * @param context
+   */
+  @Override
+  public void setup(OperatorContext context)
+  {
+    super.setup(context);
+    states = new ArrayList<S>(windowSize);
+    //initialize the sliding window state to null
+    for (int i = 0; i < windowSize; i++) {
+      states.add(null);
+    }
+    currentCursor = -1;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindowKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindowKeyVal.java b/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindowKeyVal.java
index 35e777b..69e8819 100644
--- a/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindowKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/multiwindow/AbstractSlidingWindowKeyVal.java
@@ -52,104 +52,106 @@ import com.datatorrent.lib.util.KeyValPair;
  * @since 0.3.3
  */
 public abstract class AbstractSlidingWindowKeyVal<K, V extends Number, S extends SimpleMovingAverageObject>
-		extends BaseNumberKeyValueOperator<K, V>
+    extends BaseNumberKeyValueOperator<K, V>
 {
-	/**
-	 * buffer to hold state information of different windows.
-	 */
-	protected HashMap<K, ArrayList<S>> buffer = new HashMap<K, ArrayList<S>>();
-	/**
-	 * Index of windows stating at 0.
-	 */
-	protected int currentstate = -1;
+  /**
+   * buffer to hold state information of different windows.
+   */
+  protected HashMap<K, ArrayList<S>> buffer = new HashMap<K, ArrayList<S>>();
+  /**
+   * Index of windows starting at 0.
+   */
+  protected int currentstate = -1;
 
-	/**
-	 * Concrete class has to implement how they want the tuple to be processed.
-	 *
-	 * @param tuple
-	 *          a keyVal pair of tuple.
-	 */
-	public abstract void processDataTuple(KeyValPair<K, V> tuple);
+  /**
+   * Concrete class has to implement how they want the tuple to be processed.
+   *
+   * @param tuple
+   *          a keyVal pair of tuple.
+   */
+  public abstract void processDataTuple(KeyValPair<K, V> tuple);
 
-	/**
-	 * Concrete class has to implement what to emit at the end of window.
-	 *
-	 * @param key
-	 * @param obj
-	 */
-	public abstract void emitTuple(K key, ArrayList<S> obj);
+  /**
+   * Concrete class has to implement what to emit at the end of window.
+   *
+   * @param key
+   * @param obj
+   */
+  public abstract void emitTuple(K key, ArrayList<S> obj);
 
-	/**
-	 * Length of sliding windows. Minimum value is 2.
-	 */
-	@Min(2)
-	protected int windowSize = 2;
-	protected long windowId;
+  /**
+   * Length of sliding windows. Minimum value is 2.
+   */
+  @Min(2)
+  protected int windowSize = 2;
+  protected long windowId;
 
-	/**
-	 * Getter function for windowSize (number of previous window buffer).
-	 *
-	 * @return windowSize
-	 */
-	public int getWindowSize()
-	{
-		return windowSize;
-	}
+  /**
+   * Getter function for windowSize (number of previous window buffer).
+   *
+   * @return windowSize
+   */
+  public int getWindowSize()
+  {
+    return windowSize;
+  }
 
-	/**
-	 * @param windowSize
-	 */
-	public void setWindowSize(int windowSize)
-	{
-		this.windowSize = windowSize;
-	}
+  /**
+   * @param windowSize
+   */
+  public void setWindowSize(int windowSize)
+  {
+    this.windowSize = windowSize;
+  }
 
-	/**
-	 * Input port for getting incoming data.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			processDataTuple(tuple);
-		}
-	};
+  /**
+   * Input port for getting incoming data.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> data = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      processDataTuple(tuple);
+    }
+  };
 
-	/**
-	 * Moves buffer by 1 and clear contents of current. If you override
-	 * beginWindow, you must call super.beginWindow(windowId) to ensure proper
-	 * operator behavior.
-	 *
-	 * @param windowId
-	 */
-	@Override
-	public void beginWindow(long windowId)
-	{
-		this.windowId = windowId;
-		currentstate++;
-		if (currentstate >= windowSize) {
-			for (Map.Entry<K, ArrayList<S>> e : buffer.entrySet()) {
-				ArrayList<S> states = e.getValue();
-                		S first = states.get(0);
-				for (int i=1; i < windowSize; i++) states.set(i-1, states.get(i));
-				states.set(windowSize-1, first);
-			}
-			currentstate = windowSize-1;
-		}
-		for (Map.Entry<K, ArrayList<S>> e : buffer.entrySet()) {
-			e.getValue().get(currentstate).clear();
-		}
-	}
+  /**
+   * Moves buffer by 1 and clear contents of current. If you override
+   * beginWindow, you must call super.beginWindow(windowId) to ensure proper
+   * operator behavior.
+   *
+   * @param windowId
+   */
+  @Override
+  public void beginWindow(long windowId)
+  {
+    this.windowId = windowId;
+    currentstate++;
+    if (currentstate >= windowSize) {
+      for (Map.Entry<K, ArrayList<S>> e : buffer.entrySet()) {
+        ArrayList<S> states = e.getValue();
+        S first = states.get(0);
+        for (int i = 1; i < windowSize; i++) {
+          states.set(i - 1, states.get(i));
+        }
+        states.set(windowSize - 1, first);
+      }
+      currentstate = windowSize - 1;
+    }
+    for (Map.Entry<K, ArrayList<S>> e : buffer.entrySet()) {
+      e.getValue().get(currentstate).clear();
+    }
+  }
 
-	/**
-	 * Emit tuple for each key.
-	 */
-	@Override
-	public void endWindow()
-	{
-		for (Map.Entry<K, ArrayList<S>> e : buffer.entrySet()) {
-			emitTuple(e.getKey(), e.getValue());
-		}
-	}
+  /**
+   * Emit tuple for each key.
+   */
+  @Override
+  public void endWindow()
+  {
+    for (Map.Entry<K, ArrayList<S>> e : buffer.entrySet()) {
+      emitTuple(e.getKey(), e.getValue());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyVal.java b/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyVal.java
index 09a9c24..ad4f8e5 100644
--- a/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowRangeKeyVal.java
@@ -71,7 +71,7 @@ public class MultiWindowRangeKeyVal<K, V extends Number> extends RangeKeyVal<K,
    * Clears the internal data before return
    */
   @SuppressWarnings({ "unchecked", "rawtypes" })
-	@Override
+  @Override
   public void endWindow()
   {
     boolean emit = (++windowCount) % windowSize == 0;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyVal.java b/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyVal.java
index d6463bd..7d5c377 100644
--- a/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/multiwindow/MultiWindowSumKeyVal.java
@@ -18,13 +18,14 @@
  */
 package com.datatorrent.lib.multiwindow;
 
+import java.util.Map;
+
+import javax.validation.constraints.Min;
+
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.lib.math.SumKeyVal;
 import com.datatorrent.lib.util.KeyValPair;
 
-import java.util.Map;
-import javax.validation.constraints.Min;
-
 /**
  * A sum operator of KeyValPair schema which accumulates sum across multiple
  * streaming windows.
@@ -53,45 +54,45 @@ import javax.validation.constraints.Min;
 @OperatorAnnotation(partitionable = false)
 public class MultiWindowSumKeyVal<K, V extends Number> extends SumKeyVal<K, V>
 {
-	/**
-	 * Number of streaming window after which tuple got emitted.
-	 */
-	@Min(2)
-	private int windowSize = 2;
-	private long windowCount = 0;
+  /**
+   * Number of streaming windows after which the tuple is emitted.
+   */
+  @Min(2)
+  private int windowSize = 2;
+  private long windowCount = 0;
 
-	public void setWindowSize(int windowSize)
-	{
-		this.windowSize = windowSize;
-	}
+  public void setWindowSize(int windowSize)
+  {
+    this.windowSize = windowSize;
+  }
 
-	/**
-	 * Emit only at the end of windowSize window boundary.
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	@Override
-	public void endWindow()
-	{
-		boolean emit = (++windowCount) % windowSize == 0;
+  /**
+   * Emit only at the end of windowSize window boundary.
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Override
+  public void endWindow()
+  {
+    boolean emit = (++windowCount) % windowSize == 0;
 
-		if (!emit) {
-			return;
-		}
+    if (!emit) {
+      return;
+    }
 
-		// Emit only at the end of application window boundary.
-		boolean dosum = sum.isConnected();
+    // Emit only at the end of application window boundary.
+    boolean dosum = sum.isConnected();
 
-		if (dosum) {
-			for (Map.Entry<K, SumEntry> e : sums.entrySet()) {
-				K key = e.getKey();
-				if (dosum) {
-					sum.emit(new KeyValPair(key, getValue(e.getValue().sum.doubleValue())));
-				}
-			}
-		}
+    if (dosum) {
+      for (Map.Entry<K, SumEntry> e : sums.entrySet()) {
+        K key = e.getKey();
+        if (dosum) {
+          sum.emit(new KeyValPair(key, getValue(e.getValue().sum.doubleValue())));
+        }
+      }
+    }
 
-		// Clear cumulative sum at the end of application window boundary.
-		sums.clear();
-	}
+    // Clear cumulative sum at the end of application window boundary.
+    sums.clear();
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/multiwindow/SimpleMovingAverage.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/multiwindow/SimpleMovingAverage.java b/library/src/main/java/com/datatorrent/lib/multiwindow/SimpleMovingAverage.java
index c49c2b4..4d0a2c1 100644
--- a/library/src/main/java/com/datatorrent/lib/multiwindow/SimpleMovingAverage.java
+++ b/library/src/main/java/com/datatorrent/lib/multiwindow/SimpleMovingAverage.java
@@ -48,85 +48,85 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 @OperatorAnnotation(partitionable = false)
 public class SimpleMovingAverage<K, V extends Number> extends
-		AbstractSlidingWindowKeyVal<K, V, SimpleMovingAverageObject>
+    AbstractSlidingWindowKeyVal<K, V, SimpleMovingAverageObject>
 {
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as Double.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Double>> doubleSMA = new DefaultOutputPort<KeyValPair<K, Double>>();
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as Float.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Float>> floatSMA = new DefaultOutputPort<KeyValPair<K, Float>>();
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as Long.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Long>> longSMA = new DefaultOutputPort<KeyValPair<K, Long>>();
-	/**
-	 * Output port to emit simple moving average (SMA) of last N window as
-	 * Integer.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, Integer>> integerSMA = new DefaultOutputPort<KeyValPair<K, Integer>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as Double.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Double>> doubleSMA = new DefaultOutputPort<KeyValPair<K, Double>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as Float.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Float>> floatSMA = new DefaultOutputPort<KeyValPair<K, Float>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as Long.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Long>> longSMA = new DefaultOutputPort<KeyValPair<K, Long>>();
+  /**
+   * Output port to emit simple moving average (SMA) of last N window as
+   * Integer.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, Integer>> integerSMA = new DefaultOutputPort<KeyValPair<K, Integer>>();
 
-	/**
-	 * Create the list if key doesn't exist. Add value to buffer and increment
-	 * counter.
-	 *
-	 * @param tuple
-	 */
-	@Override
-	public void processDataTuple(KeyValPair<K, V> tuple)
-	{
-		K key = tuple.getKey();
-		double val = tuple.getValue().doubleValue();
-		ArrayList<SimpleMovingAverageObject> dataList = buffer.get(key);
+  /**
+   * Create the list if key doesn't exist. Add value to buffer and increment
+   * counter.
+   *
+   * @param tuple
+   */
+  @Override
+  public void processDataTuple(KeyValPair<K, V> tuple)
+  {
+    K key = tuple.getKey();
+    double val = tuple.getValue().doubleValue();
+    ArrayList<SimpleMovingAverageObject> dataList = buffer.get(key);
 
-		if (dataList == null) {
-			dataList = new ArrayList<SimpleMovingAverageObject>(windowSize);
-			for (int i = 0; i < windowSize; ++i) {
-				dataList.add(new SimpleMovingAverageObject());
-			}
-		}
+    if (dataList == null) {
+      dataList = new ArrayList<SimpleMovingAverageObject>(windowSize);
+      for (int i = 0; i < windowSize; ++i) {
+        dataList.add(new SimpleMovingAverageObject());
+      }
+    }
 
-		dataList.get(currentstate).add(val); // add to previous value
-		buffer.put(key, dataList);
-	}
+    dataList.get(currentstate).add(val); // add to previous value
+    buffer.put(key, dataList);
+  }
 
-	/**
-	 * Calculate average and emit in appropriate port.
-	 *
-	 * @param key
-	 * @param obj
-	 */
-	@Override
-	public void emitTuple(K key, ArrayList<SimpleMovingAverageObject> obj)
-	{
-		double sum = 0;
-		int count = 0;
-		for (int i = 0; i < windowSize; i++) {
-			SimpleMovingAverageObject d = obj.get(i);
-			sum += d.getSum();
-			count += d.getCount();
-		}
+  /**
+   * Calculate average and emit in appropriate port.
+   *
+   * @param key
+   * @param obj
+   */
+  @Override
+  public void emitTuple(K key, ArrayList<SimpleMovingAverageObject> obj)
+  {
+    double sum = 0;
+    int count = 0;
+    for (int i = 0; i < windowSize; i++) {
+      SimpleMovingAverageObject d = obj.get(i);
+      sum += d.getSum();
+      count += d.getCount();
+    }
 
-		if (count == 0) { // Nothing to emit.
-			return;
-		}
-		if (doubleSMA.isConnected()) {
-			doubleSMA.emit(new KeyValPair<K, Double>(key, (sum / count)));
-		}
-		if (floatSMA.isConnected()) {
-			floatSMA.emit(new KeyValPair<K, Float>(key, (float) (sum / count)));
-		}
-		if (longSMA.isConnected()) {
-			longSMA.emit(new KeyValPair<K, Long>(key, (long) (sum / count)));
-		}
-		if (integerSMA.isConnected()) {
-			integerSMA.emit(new KeyValPair<K, Integer>(key, (int) (sum / count)));
-		}
-	}
+    if (count == 0) { // Nothing to emit.
+      return;
+    }
+    if (doubleSMA.isConnected()) {
+      doubleSMA.emit(new KeyValPair<K, Double>(key, (sum / count)));
+    }
+    if (floatSMA.isConnected()) {
+      floatSMA.emit(new KeyValPair<K, Float>(key, (float)(sum / count)));
+    }
+    if (longSMA.isConnected()) {
+      longSMA.emit(new KeyValPair<K, Long>(key, (long)(sum / count)));
+    }
+    if (integerSMA.isConnected()) {
+      integerSMA.emit(new KeyValPair<K, Integer>(key, (int)(sum / count)));
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/multiwindow/SortedMovingWindow.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/multiwindow/SortedMovingWindow.java b/library/src/main/java/com/datatorrent/lib/multiwindow/SortedMovingWindow.java
index 8d3228b..892c822 100644
--- a/library/src/main/java/com/datatorrent/lib/multiwindow/SortedMovingWindow.java
+++ b/library/src/main/java/com/datatorrent/lib/multiwindow/SortedMovingWindow.java
@@ -28,10 +28,12 @@ import java.util.PriorityQueue;
 
 import javax.validation.constraints.NotNull;
 
+import org.apache.commons.lang.ClassUtils;
+
+import com.google.common.base.Function;
+
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
-import com.google.common.base.Function;
-import org.apache.commons.lang.ClassUtils;
 
 /**
  *
@@ -102,7 +104,7 @@ public class SortedMovingWindow<T, K> extends AbstractSlidingWindow<T, List<T>>
   {
     super.endWindow();
     tuplesInCurrentStreamWindow = new LinkedList<T>();
-    if(lastExpiredWindowState == null){
+    if (lastExpiredWindowState == null) {
       // not ready to emit value or empty in a certain window
       return;
     }
@@ -115,7 +117,7 @@ public class SortedMovingWindow<T, K> extends AbstractSlidingWindow<T, List<T>>
         int k = 0;
         if (comparator == null) {
           if (expiredTuple instanceof Comparable) {
-            k = ((Comparable<T>) expiredTuple).compareTo(minElemInSortedList);
+            k = ((Comparable<T>)expiredTuple).compareTo(minElemInSortedList);
           } else {
             errorOutput.emit(expiredTuple);
             throw new IllegalArgumentException("Operator \"" + ClassUtils.getShortClassName(this.getClass()) + "\" encounters an invalid tuple " + expiredTuple + "\nNeither the tuple is comparable Nor Comparator is specified!");

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/partitioner/StatsAwareStatelessPartitioner.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/partitioner/StatsAwareStatelessPartitioner.java b/library/src/main/java/com/datatorrent/lib/partitioner/StatsAwareStatelessPartitioner.java
index 6e2e01f..ed571b4 100644
--- a/library/src/main/java/com/datatorrent/lib/partitioner/StatsAwareStatelessPartitioner.java
+++ b/library/src/main/java/com/datatorrent/lib/partitioner/StatsAwareStatelessPartitioner.java
@@ -21,20 +21,24 @@ package com.datatorrent.lib.partitioner;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import javax.validation.constraints.Min;
 
-import com.google.common.collect.Sets;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Sets;
+
 import com.datatorrent.api.DefaultPartition;
 import com.datatorrent.api.Operator;
 import com.datatorrent.api.Partitioner;
 import com.datatorrent.api.StatsListener;
-
 import com.datatorrent.common.partitioner.StatelessPartitioner;
 
 /**
@@ -122,13 +126,11 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
         response.repartitionRequired = true;
         logger.debug("setting repartition to true");
 
-      }
-      else if (!repartition) {
+      } else if (!repartition) {
         repartition = true;
         nextMillis = System.currentTimeMillis() + cooldownMillis;
       }
-    }
-    else {
+    } else {
       repartition = false;
     }
     return response;
@@ -147,8 +149,7 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
       nextMillis = partitionNextMillis;
       // delegate to create initial list of partitions
       return new StatelessPartitioner<T>(initialPartitionCount).definePartitions(partitions, context);
-    }
-    else {
+    } else {
       // repartition call
       logger.debug("repartition call for operator");
       if (System.currentTimeMillis() < partitionNextMillis) {
@@ -169,8 +170,7 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
             Partition<T> siblingPartition = lowLoadPartitions.remove(partitionKey & reducedMask);
             if (siblingPartition == null) {
               lowLoadPartitions.put(partitionKey & reducedMask, p);
-            }
-            else {
+            } else {
               // both of the partitions are low load, combine
               PartitionKeys newPks = new PartitionKeys(reducedMask, Sets.newHashSet(partitionKey & reducedMask));
               // put new value so the map gets marked as modified
@@ -181,8 +181,7 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
               //LOG.debug("partition keys after merge {}", siblingPartition.getPartitionKeys());
             }
           }
-        }
-        else if (load > 0) {
+        } else if (load > 0) {
           // split bottlenecks
           Map<Operator.InputPort<?>, PartitionKeys> keys = p.getPartitionKeys();
           Map.Entry<Operator.InputPort<?>, PartitionKeys> e = keys.entrySet().iterator().next();
@@ -196,8 +195,7 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
             int key = e.getValue().partitions.iterator().next();
             int key2 = (newMask ^ e.getValue().mask) | key;
             newKeys = Sets.newHashSet(key, key2);
-          }
-          else {
+          } else {
             // assign keys to separate partitions
             newMask = e.getValue().mask;
             newKeys = e.getValue().partitions;
@@ -208,8 +206,7 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
             newPartition.getPartitionKeys().put(e.getKey(), new PartitionKeys(newMask, Sets.newHashSet(key)));
             newPartitions.add(newPartition);
           }
-        }
-        else {
+        } else {
           // leave unchanged
           newPartitions.add(p);
         }
@@ -229,8 +226,8 @@ public abstract class StatsAwareStatelessPartitioner<T extends Operator> impleme
     partitionedInstanceStatus.clear();
     for (Map.Entry<Integer, Partition<T>> entry : partitions.entrySet()) {
       if (partitionedInstanceStatus.containsKey(entry.getKey())) {
-      }
-      else {
+        //FIXME
+      } else {
         partitionedInstanceStatus.put(entry.getKey(), null);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/script/JavaScriptOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/script/JavaScriptOperator.java b/library/src/main/java/com/datatorrent/lib/script/JavaScriptOperator.java
index 1c5c96a..4e607f1 100644
--- a/library/src/main/java/com/datatorrent/lib/script/JavaScriptOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/script/JavaScriptOperator.java
@@ -18,14 +18,22 @@
  */
 package com.datatorrent.lib.script;
 
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.HashMap;
 import java.util.Map;
-import javax.script.*;
+
+import javax.script.Invocable;
+import javax.script.ScriptContext;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+import javax.script.ScriptException;
+import javax.script.SimpleBindings;
+import javax.script.SimpleScriptContext;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context.OperatorContext;
+
 /**
  * An implementation of ScriptOperator that executes JavaScript on tuples input for Map &lt;String, Object&gt;.
  *
@@ -46,7 +54,7 @@ import org.slf4j.LoggerFactory;
  *
  * // Connect to output console operator
  * ConsoleOutputOperator console = dag.addOperator(&quot;console&quot;,
- * 		new ConsoleOutputOperator());
+ *   new ConsoleOutputOperator());
  * dag.addStream(&quot;rand_console&quot;, script.result, console.input);
  *
  * </pre>
@@ -54,17 +62,17 @@ import org.slf4j.LoggerFactory;
  * <b> Sample Input Operator(emit)</b>
  *
  * <pre>
- *  	.
- * 		.
- * 		public void emitTuples() {
- * 			HashMap<String, Object> map = new HashMap<String, Object>();
- * 			map.put("val", random.nextInt());
- * 			outport.emit(map);
- * 			.
- * 			.
- * 		}
- * 		.
- * 		.
+ * .
+ * .
+ * public void emitTuples() {
+ *    HashMap<String, Object> map = new HashMap<String, Object>();
+ *    map.put("val", random.nextInt());
+ *    outport.emit(map);
+ *    .
+ *    .
+ * }
+ * .
+ * .
  * </pre>
  *
  * This operator does not checkpoint interpreted functions in the variable bindings because they are not serializable
@@ -80,9 +88,8 @@ public class JavaScriptOperator extends ScriptOperator
 
   public enum Type
   {
-
     EVAL, INVOKE
-  };
+  }
 
   protected transient ScriptEngineManager sem = new ScriptEngineManager();
   protected transient ScriptEngine engine = sem.getEngineByName("JavaScript");
@@ -108,6 +115,8 @@ public class JavaScriptOperator extends ScriptOperator
         case INVOKE:
           evalResult = ((Invocable)engine).invokeFunction(script);
           break;
+        default:
+          //fallthru
       }
 
       if (isPassThru && result.isConnected()) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java b/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
index f10b04c..9532180 100644
--- a/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
@@ -18,16 +18,18 @@
  */
 package com.datatorrent.lib.script;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.annotation.InputPortFieldAnnotation;
-import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+
 import javax.validation.constraints.NotNull;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * A base implementation of a BaseOperator for language script operator.&nbsp; Subclasses should provide the 
    implementation of getting the bindings and process method. 
@@ -96,5 +98,6 @@ public abstract class ScriptOperator extends BaseOperator
   }
 
   public abstract void process(Map<String, Object> tuple);
+
   public abstract Map<String, Object> getBindings();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/statistics/MedianOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/statistics/MedianOperator.java b/library/src/main/java/com/datatorrent/lib/statistics/MedianOperator.java
index ebbe94a..95debb4 100644
--- a/library/src/main/java/com/datatorrent/lib/statistics/MedianOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/statistics/MedianOperator.java
@@ -21,10 +21,10 @@ package com.datatorrent.lib.statistics;
 import java.util.ArrayList;
 import java.util.Collections;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * An implementation of BaseOperator that computes median of incoming data. <br>
@@ -77,7 +77,9 @@ public class MedianOperator extends BaseOperator
   @Override
   public void endWindow()
   {
-    if (values.size() == 0) return;
+    if (values.size() == 0) {
+      return;
+    }
     if (values.size() == 1) {
       median.emit(values.get(0));
       return;
@@ -86,9 +88,9 @@ public class MedianOperator extends BaseOperator
     // median value
     Collections.sort(values);
     int medianIndex = values.size() / 2;
-    if (values.size() %2 == 0) {
-      Double value = values.get(medianIndex-1);
-      value = (value + values.get(medianIndex))/2;
+    if (values.size() % 2 == 0) {
+      Double value = values.get(medianIndex - 1);
+      value = (value + values.get(medianIndex)) / 2;
       median.emit(value);
     } else {
       median.emit(values.get(medianIndex));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/statistics/ModeOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/statistics/ModeOperator.java b/library/src/main/java/com/datatorrent/lib/statistics/ModeOperator.java
index 84c9c46..f23f7b7 100644
--- a/library/src/main/java/com/datatorrent/lib/statistics/ModeOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/statistics/ModeOperator.java
@@ -21,10 +21,10 @@ package com.datatorrent.lib.statistics;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * An implementation of BaseOperator that calculates most frequent value occurring in incoming data. <br>
@@ -63,7 +63,7 @@ public class ModeOperator<V extends Comparable<?>> extends BaseOperator
     {
       if (values.containsKey(tuple)) {
         Integer count = values.remove(tuple);
-        values.put(tuple, count+1);
+        values.put(tuple, count + 1);
       } else {
         values.put(tuple, 1);
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/statistics/StandardDeviation.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/statistics/StandardDeviation.java b/library/src/main/java/com/datatorrent/lib/statistics/StandardDeviation.java
index 0a47450..8aaf63e 100644
--- a/library/src/main/java/com/datatorrent/lib/statistics/StandardDeviation.java
+++ b/library/src/main/java/com/datatorrent/lib/statistics/StandardDeviation.java
@@ -20,11 +20,11 @@ package com.datatorrent.lib.statistics;
 
 import java.util.ArrayList;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * An implementation of BaseOperator that computes variance and standard deviation over incoming data. <br>
@@ -67,7 +67,7 @@ public class StandardDeviation extends BaseOperator
   /**
    * Variance output port.
    */
-  @OutputPortFieldAnnotation(optional=true)
+  @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Number> variance = new DefaultOutputPort<Number>();
 
   /**
@@ -82,19 +82,21 @@ public class StandardDeviation extends BaseOperator
   public void endWindow()
   {
     // no values.
-    if (values.size() == 0) return;
+    if (values.size() == 0) {
+      return;
+    }
 
     // get mean first.
     double mean = 0.0;
     for (Double value : values) {
       mean += value;
     }
-    mean = mean/values.size();
+    mean = mean / values.size();
 
     // get variance
     double outVal = 0.0;
     for (Double value : values) {
-      outVal += (value-mean)*(value-mean);
+      outVal += (value - mean) * (value - mean);
     }
     outVal = outVal / values.size();
     if (variance.isConnected()) {



[19/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOUtils.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOUtils.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOUtils.java
index 605c6b9..c866918 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOUtils.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOUtils.java
@@ -90,7 +90,7 @@ public class GPOUtils
   public static Map<String, Type> buildTypeMap(JSONObject jo) throws JSONException
   {
     Map<String, Type> fieldToType = Maps.newHashMap();
-    for(Iterator<String> keys = (Iterator<String>) jo.keys();
+    for (Iterator<String> keys = (Iterator<String>)jo.keys();
         keys.hasNext();) {
       String key = keys.next();
       String val = jo.getString(key);
@@ -107,14 +107,13 @@ public class GPOUtils
    * @param dpou The JSONObject to deserialize from.
    * @return The deserialized GPOMutable object.
    */
-  public static GPOMutable deserialize(FieldsDescriptor fieldsDescriptor,
-                                       JSONObject dpou)
+  public static GPOMutable deserialize(FieldsDescriptor fieldsDescriptor, JSONObject dpou)
   {
     GPOMutable gpo = new GPOMutable(fieldsDescriptor);
     @SuppressWarnings("unchecked")
-    Iterator<String> itr = (Iterator<String>) dpou.keys();
+    Iterator<String> itr = (Iterator<String>)dpou.keys();
 
-    while(itr.hasNext()) {
+    while (itr.hasNext()) {
       String field = itr.next();
       setFieldFromJSON(gpo, field, dpou);
     }
@@ -128,8 +127,7 @@ public class GPOUtils
    * @param dpou The {@link JSONObject} which contains the fields whose values need to be fetched.
    * @return A {@link Map} whose keys are field names, and whose values are possible values for those fields.
    */
-  public static Map<String, Set<Object>> deserializeToMap(FieldsDescriptor fieldsDescriptor,
-                                                          JSONObject dpou)
+  public static Map<String, Set<Object>> deserializeToMap(FieldsDescriptor fieldsDescriptor, JSONObject dpou)
   {
     Map<String, Set<Object>> keyToValues = Maps.newHashMap();
 
@@ -149,7 +147,7 @@ public class GPOUtils
 
       if (keyValue instanceof JSONArray) {
 
-        JSONArray ja = (JSONArray) keyValue;
+        JSONArray ja = (JSONArray)keyValue;
         keyValues = Sets.newHashSetWithExpectedSize(ja.length());
 
         Type type = fieldsDescriptor.getType(key);
@@ -244,25 +242,16 @@ public class GPOUtils
     Type type = fd.getType(field);
     int intVal = 0;
 
-    if(numericTypeIntOrSmaller(type)) {
+    if (numericTypeIntOrSmaller(type)) {
       try {
         intVal = jo.getInt(field);
       } catch (JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid "
-                                           + type
-                                           + " value.", ex);
+        throw new IllegalArgumentException("The key " + field + " does not have a valid " + type + " value.", ex);
       }
 
       if (type != Type.INTEGER && !insideRange(type, intVal)) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " has a value "
-                                           + intVal
-                                           + " which is out of range for a "
-                                           + type
-                                           + ".");
+        throw new IllegalArgumentException("The key " + field + " has a value " + intVal
+            + " which is out of range for a " + type + ".");
       }
     }
 
@@ -273,9 +262,9 @@ public class GPOUtils
         throw new IllegalArgumentException("The key " + field + " does not have a valid bool value.", ex);
       }
     } else if (type == Type.BYTE) {
-      return ((byte) intVal);
+      return ((byte)intVal);
     } else if (type == Type.SHORT) {
-      return ((short) intVal);
+      return ((short)intVal);
     } else if (type == Type.INTEGER) {
       return intVal;
     } else if (type == Type.LONG) {
@@ -352,25 +341,16 @@ public class GPOUtils
   {
     int intVal = 0;
 
-    if(numericTypeIntOrSmaller(type)) {
+    if (numericTypeIntOrSmaller(type)) {
       try {
         intVal = ja.getInt(index);
       } catch (JSONException ex) {
-        throw new IllegalArgumentException("The index "
-                                           + index
-                                           + " does not have a valid "
-                                           + type
-                                           + " value.", ex);
+        throw new IllegalArgumentException("The index " + index + " does not have a valid " + type + " value.", ex);
       }
 
       if (type != Type.INTEGER && !insideRange(type, intVal)) {
-        throw new IllegalArgumentException("The index "
-                                           + index
-                                           + " has a value "
-                                           + intVal
-                                           + " which is out of range for a "
-                                           + type
-                                           + ".");
+        throw new IllegalArgumentException("The index " + index + " has a value " + intVal
+            + " which is out of range for a " + type + ".");
       }
     }
 
@@ -381,9 +361,9 @@ public class GPOUtils
         throw new IllegalArgumentException("The index " + index + " does not have a valid bool value.", ex);
       }
     } else if (type == Type.BYTE) {
-      return ((byte) intVal);
+      return ((byte)intVal);
     } else if (type == Type.SHORT) {
-      return ((short) intVal);
+      return ((short)intVal);
     } else if (type == Type.INTEGER) {
       return intVal;
     } else if (type == Type.LONG) {
@@ -436,7 +416,7 @@ public class GPOUtils
       }
     } else if (type == Type.FLOAT) {
       try {
-        return (float) ja.getDouble(index);
+        return (float)ja.getDouble(index);
       } catch (JSONException ex) {
         throw new IllegalArgumentException("The index "
                                            + index
@@ -462,7 +442,7 @@ public class GPOUtils
     JSONObject jo = new JSONObject();
     FieldsDescriptor fd = gpo.getFieldDescriptor();
 
-    for(String field: fields.getFields()) {
+    for (String field : fields.getFields()) {
       Type fieldType = fd.getType(field);
       GPOType gpoType = GPOType.GPO_TYPE_ARRAY[fieldType.ordinal()];
       gpoType.serializeJSONObject(jo, gpo, field, resultFormatter);
@@ -496,14 +476,12 @@ public class GPOUtils
 
     List<Type> types = fd.getTypesList();
 
-    for(int typeIndex = 0;
-        typeIndex < types.size();
-        typeIndex++) {
+    for (int typeIndex = 0; typeIndex < types.size(); typeIndex++) {
       Type type = types.get(typeIndex);
 
-      switch(type) {
+      switch (type) {
         case STRING: {
-          for(String val: gpo.getFieldsString()) {
+          for (String val : gpo.getFieldsString()) {
             arrayLength += Type.INTEGER.getByteSize();
             arrayLength += val.getBytes().length;
           }
@@ -537,114 +515,76 @@ public class GPOUtils
     MutableInt offset = new MutableInt(0);
 
     boolean[] fieldsBoolean = gpo.getFieldsBoolean();
-    if(fieldsBoolean != null) {
-      for(int index = 0;
-          index < fieldsBoolean.length;
-          index++) {
-        serializeBoolean(fieldsBoolean[index],
-                         sbytes,
-                         offset);
+    if (fieldsBoolean != null) {
+      for (int index = 0; index < fieldsBoolean.length; index++) {
+        serializeBoolean(fieldsBoolean[index], sbytes, offset);
       }
     }
 
     char[] fieldsCharacter = gpo.getFieldsCharacter();
-    if(fieldsCharacter != null) {
-      for(int index = 0;
-          index < fieldsCharacter.length;
-          index++) {
-        serializeChar(fieldsCharacter[index],
-                      sbytes,
-                      offset);
+    if (fieldsCharacter != null) {
+      for (int index = 0; index < fieldsCharacter.length; index++) {
+        serializeChar(fieldsCharacter[index], sbytes, offset);
       }
     }
 
     byte[] fieldsByte = gpo.getFieldsByte();
-    if(fieldsByte != null) {
-      for(int index = 0;
-          index < fieldsByte.length;
-          index++) {
-        serializeByte(fieldsByte[index],
-                      sbytes,
-                      offset);
+    if (fieldsByte != null) {
+      for (int index = 0; index < fieldsByte.length; index++) {
+        serializeByte(fieldsByte[index], sbytes, offset);
       }
     }
 
     short[] fieldsShort = gpo.getFieldsShort();
-    if(fieldsShort != null) {
-      for(int index = 0;
-          index < fieldsShort.length;
-          index++) {
-        serializeShort(fieldsShort[index],
-                      sbytes,
-                      offset);
+    if (fieldsShort != null) {
+      for (int index = 0; index < fieldsShort.length; index++) {
+        serializeShort(fieldsShort[index], sbytes, offset);
       }
     }
 
     int[] fieldsInteger = gpo.getFieldsInteger();
-    if(fieldsInteger != null) {
-      for(int index = 0;
-          index < fieldsInteger.length;
-          index++) {
-        serializeInt(fieldsInteger[index],
-                      sbytes,
-                      offset);
+    if (fieldsInteger != null) {
+      for (int index = 0; index < fieldsInteger.length; index++) {
+        serializeInt(fieldsInteger[index], sbytes, offset);
       }
     }
 
     long[] fieldsLong = gpo.getFieldsLong();
-    if(fieldsLong != null) {
-      for(int index = 0;
-          index < fieldsLong.length;
-          index++) {
-        serializeLong(fieldsLong[index],
-                      sbytes,
-                      offset);
+    if (fieldsLong != null) {
+      for (int index = 0; index < fieldsLong.length; index++) {
+        serializeLong(fieldsLong[index], sbytes, offset);
       }
     }
 
     float[] fieldsFloat = gpo.getFieldsFloat();
-    if(fieldsFloat != null) {
-      for(int index = 0;
-          index < fieldsFloat.length;
-          index++) {
-        serializeFloat(fieldsFloat[index],
-                      sbytes,
-                      offset);
+    if (fieldsFloat != null) {
+      for (int index = 0; index < fieldsFloat.length; index++) {
+        serializeFloat(fieldsFloat[index], sbytes, offset);
       }
     }
 
     double[] fieldsDouble = gpo.getFieldsDouble();
-    if(fieldsDouble != null) {
-      for(int index = 0;
-          index < fieldsDouble.length;
-          index++) {
-        serializeDouble(fieldsDouble[index],
-                      sbytes,
-                      offset);
+    if (fieldsDouble != null) {
+      for (int index = 0; index < fieldsDouble.length; index++) {
+        serializeDouble(fieldsDouble[index], sbytes, offset);
       }
     }
 
     String[] fieldsString = gpo.getFieldsString();
-    if(fieldsString != null) {
-      for(int index = 0;
-          index < fieldsString.length;
-          index++) {
-        serializeString(fieldsString[index],
-                      sbytes,
-                      offset);
+    if (fieldsString != null) {
+      for (int index = 0; index < fieldsString.length; index++) {
+        serializeString(fieldsString[index], sbytes, offset);
       }
     }
 
-    if(sbytes.length > 0) {
+    if (sbytes.length > 0) {
       byteArrayList.add(sbytes);
     }
 
     Object[] fieldsObject = gpo.getFieldsObject();
     Serde[] serdes = gpo.getFieldDescriptor().getSerdes();
-    if(fieldsObject != null) {
-      for(int index = 0;
-          index < fieldsObject.length;
-          index++) {
+    if (fieldsObject != null) {
+      for (int index = 0; index < fieldsObject.length; index++) {
         byteArrayList.add(serdes[index].serializeObject(fieldsObject[index]));
       }
     }
@@ -669,8 +609,8 @@ public class GPOUtils
     Set<String> fields = gpo.getFieldDescriptor().getFields().getFields();
     Set<String> exFieldsSet = excludedFields.getFields();
 
-    for(String field: fields) {
-      if(exFieldsSet.contains(field)) {
+    for (String field : fields) {
+      if (exFieldsSet.contains(field)) {
         continue;
       }
 
@@ -691,99 +631,77 @@ public class GPOUtils
    * @param offset An offset in the byte array to start deserializing from.
    * @return The deserialized GPOMutable.
    */
-  public static GPOMutable deserialize(FieldsDescriptor fd,
-                                       byte[] serializedGPO,
-                                       MutableInt offset)
+  public static GPOMutable deserialize(FieldsDescriptor fd, byte[] serializedGPO, MutableInt offset)
   {
     GPOMutable gpo = new GPOMutable(fd);
 
     boolean[] fieldsBoolean = gpo.getFieldsBoolean();
-    if(fieldsBoolean != null) {
-      for(int index = 0;
-          index < fieldsBoolean.length;
-          index++) {
+    if (fieldsBoolean != null) {
+      for (int index = 0; index < fieldsBoolean.length; index++) {
         fieldsBoolean[index] = deserializeBoolean(serializedGPO, offset);
       }
     }
 
     char[] fieldsCharacter = gpo.getFieldsCharacter();
-    if(fieldsCharacter != null) {
-      for(int index = 0;
-          index < fieldsCharacter.length;
-          index++) {
+    if (fieldsCharacter != null) {
+      for (int index = 0; index < fieldsCharacter.length; index++) {
         fieldsCharacter[index] = deserializeChar(serializedGPO, offset);
       }
     }
 
     byte[] fieldsByte = gpo.getFieldsByte();
-    if(fieldsByte != null) {
-      for(int index = 0;
-          index < fieldsByte.length;
-          index++) {
+    if (fieldsByte != null) {
+      for (int index = 0; index < fieldsByte.length; index++) {
         fieldsByte[index] = deserializeByte(serializedGPO, offset);
       }
     }
 
     short[] fieldsShort = gpo.getFieldsShort();
-    if(fieldsShort != null) {
-      for(int index = 0;
-          index < fieldsShort.length;
-          index++) {
+    if (fieldsShort != null) {
+      for (int index = 0; index < fieldsShort.length; index++) {
         fieldsShort[index] = deserializeShort(serializedGPO, offset);
       }
     }
 
     int[] fieldsInteger = gpo.getFieldsInteger();
-    if(fieldsInteger != null) {
-      for(int index = 0;
-          index < fieldsInteger.length;
-          index++) {
+    if (fieldsInteger != null) {
+      for (int index = 0; index < fieldsInteger.length; index++) {
         fieldsInteger[index] = deserializeInt(serializedGPO, offset);
       }
     }
 
     long[] fieldsLong = gpo.getFieldsLong();
-    if(fieldsLong != null) {
-      for(int index = 0;
-          index < fieldsLong.length;
-          index++) {
+    if (fieldsLong != null) {
+      for (int index = 0; index < fieldsLong.length; index++) {
         fieldsLong[index] = deserializeLong(serializedGPO, offset);
       }
     }
 
     float[] fieldsFloat = gpo.getFieldsFloat();
-    if(fieldsFloat != null) {
-      for(int index = 0;
-          index < fieldsFloat.length;
-          index++) {
+    if (fieldsFloat != null) {
+      for (int index = 0; index < fieldsFloat.length; index++) {
         fieldsFloat[index] = deserializeFloat(serializedGPO, offset);
       }
     }
 
     double[] fieldsDouble = gpo.getFieldsDouble();
-    if(fieldsDouble != null) {
-      for(int index = 0;
-          index < fieldsDouble.length;
-          index++) {
+    if (fieldsDouble != null) {
+      for (int index = 0; index < fieldsDouble.length; index++) {
         fieldsDouble[index] = deserializeDouble(serializedGPO, offset);
       }
     }
 
     String[] fieldsString = gpo.getFieldsString();
-    if(fieldsString != null) {
-      for(int index = 0;
-          index < fieldsString.length;
-          index++) {
+    if (fieldsString != null) {
+      for (int index = 0; index < fieldsString.length; index++) {
         fieldsString[index] = deserializeString(serializedGPO, offset);
       }
     }
 
     Object[] fieldsObject = gpo.getFieldsObject();
     Serde[] serdes = fd.getSerdes();
-    if(fieldsObject != null) {
-      for(int index = 0;
-          index < fieldsObject.length;
-          index++) {
+    if (fieldsObject != null) {
+      for (int index = 0; index < fieldsObject.length; index++) {
         fieldsObject[index] = serdes[index].deserializeObject(serializedGPO, offset);
       }
     }
@@ -800,18 +718,16 @@ public class GPOUtils
    * @param offset The offset in the provided array to start deserializing from.
    * @return The deserialized {@link GPOMutable}.
    */
-  public static GPOMutable deserialize(FieldsDescriptor fieldsDescriptor,
-                                       Fields excludedFields,
-                                       byte[] serializedGPO,
-                                       int offset)
+  public static GPOMutable deserialize(FieldsDescriptor fieldsDescriptor, Fields excludedFields, byte[] serializedGPO,
+      int offset)
   {
     GPOMutable gpo = new GPOMutable(fieldsDescriptor);
     MutableInt offsetM = new MutableInt(offset);
 
     Set<String> exFieldsSet = excludedFields.getFields();
 
-    for(String field: fieldsDescriptor.getFields().getFields()) {
-      if(exFieldsSet.contains(field)) {
+    for (String field : fieldsDescriptor.getFields().getFields()) {
+      if (exFieldsSet.contains(field)) {
         continue;
       }
 
@@ -830,11 +746,9 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized string.
    */
-  public static String deserializeString(byte[] buffer,
-                                         MutableInt offset)
+  public static String deserializeString(byte[] buffer, MutableInt offset)
   {
-    int length = deserializeInt(buffer,
-                                offset);
+    int length = deserializeInt(buffer, offset);
 
     String val = new String(buffer, offset.intValue(), length);
     offset.add(length);
@@ -848,20 +762,14 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeString(String val,
-                                     byte[] buffer,
-                                     MutableInt offset)
+  public static void serializeString(String val, byte[] buffer, MutableInt offset)
   {
     byte[] stringBytes = val.getBytes();
     int length = stringBytes.length;
 
-    serializeInt(length,
-                 buffer,
-                 offset);
+    serializeInt(length, buffer, offset);
 
-    for(int index = 0;
-        index < length;
-        index++) {
+    for (int index = 0; index < length; index++) {
       buffer[offset.intValue() + index] = stringBytes[index];
     }
 
@@ -888,18 +796,17 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized long.
    */
-  public static long deserializeLong(byte[] buffer,
-                                     MutableInt offset)
+  public static long deserializeLong(byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    long val = ((((long) buffer[0 + offsetInt]) & 0xFFL) << 56) |
-           ((((long) buffer[1 + offsetInt]) & 0xFFL) << 48) |
-           ((((long) buffer[2 + offsetInt]) & 0xFFL) << 40) |
-           ((((long) buffer[3 + offsetInt]) & 0xFFL) << 32) |
-           ((((long) buffer[4 + offsetInt]) & 0xFFL) << 24) |
-           ((((long) buffer[5 + offsetInt]) & 0xFFL) << 16) |
-           ((((long) buffer[6 + offsetInt]) & 0xFFL) << 8)  |
-           (((long) buffer[7 + offsetInt]) & 0xFFL);
+    long val = ((((long)buffer[0 + offsetInt]) & 0xFFL) << 56) |
+        ((((long)buffer[1 + offsetInt]) & 0xFFL) << 48) |
+        ((((long)buffer[2 + offsetInt]) & 0xFFL) << 40) |
+        ((((long)buffer[3 + offsetInt]) & 0xFFL) << 32) |
+        ((((long)buffer[4 + offsetInt]) & 0xFFL) << 24) |
+        ((((long)buffer[5 + offsetInt]) & 0xFFL) << 16) |
+        ((((long)buffer[6 + offsetInt]) & 0xFFL) << 8) |
+        (((long)buffer[7 + offsetInt]) & 0xFFL);
 
     offset.add(Type.LONG.getByteSize());
     return val;
@@ -912,19 +819,17 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeLong(long val,
-                                   byte[] buffer,
-                                   MutableInt offset)
+  public static void serializeLong(long val, byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    buffer[0 + offsetInt] = (byte) ((val >> 56) & 0xFFL);
-    buffer[1 + offsetInt] = (byte) ((val >> 48) & 0xFFL);
-    buffer[2 + offsetInt] = (byte) ((val >> 40) & 0xFFL);
-    buffer[3 + offsetInt] = (byte) ((val >> 32) & 0xFFL);
-    buffer[4 + offsetInt] = (byte) ((val >> 24) & 0xFFL);
-    buffer[5 + offsetInt] = (byte) ((val >> 16) & 0xFFL);
-    buffer[6 + offsetInt] = (byte) ((val >> 8) & 0xFFL);
-    buffer[7 + offsetInt] = (byte) (val & 0xFFL);
+    buffer[0 + offsetInt] = (byte)((val >> 56) & 0xFFL);
+    buffer[1 + offsetInt] = (byte)((val >> 48) & 0xFFL);
+    buffer[2 + offsetInt] = (byte)((val >> 40) & 0xFFL);
+    buffer[3 + offsetInt] = (byte)((val >> 32) & 0xFFL);
+    buffer[4 + offsetInt] = (byte)((val >> 24) & 0xFFL);
+    buffer[5 + offsetInt] = (byte)((val >> 16) & 0xFFL);
+    buffer[6 + offsetInt] = (byte)((val >> 8) & 0xFFL);
+    buffer[7 + offsetInt] = (byte)(val & 0xFFL);
 
     offset.add(Type.LONG.getByteSize());
   }
@@ -938,14 +843,14 @@ public class GPOUtils
   {
     byte[] buffer = new byte[Type.LONG.getByteSize()];
 
-    buffer[0] = (byte) ((val >> 56) & 0xFFL);
-    buffer[1] = (byte) ((val >> 48) & 0xFFL);
-    buffer[2] = (byte) ((val >> 40) & 0xFFL);
-    buffer[3] = (byte) ((val >> 32) & 0xFFL);
-    buffer[4] = (byte) ((val >> 24) & 0xFFL);
-    buffer[5] = (byte) ((val >> 16) & 0xFFL);
-    buffer[6] = (byte) ((val >> 8) & 0xFFL);
-    buffer[7] = (byte) (val & 0xFFL);
+    buffer[0] = (byte)((val >> 56) & 0xFFL);
+    buffer[1] = (byte)((val >> 48) & 0xFFL);
+    buffer[2] = (byte)((val >> 40) & 0xFFL);
+    buffer[3] = (byte)((val >> 32) & 0xFFL);
+    buffer[4] = (byte)((val >> 24) & 0xFFL);
+    buffer[5] = (byte)((val >> 16) & 0xFFL);
+    buffer[6] = (byte)((val >> 8) & 0xFFL);
+    buffer[7] = (byte)(val & 0xFFL);
 
     return buffer;
   }
@@ -968,18 +873,17 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized double.
    */
-  public static double deserializeDouble(byte[] buffer,
-                                       MutableInt offset)
+  public static double deserializeDouble(byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    long val = (((long) buffer[0 + offsetInt]) & 0xFFL) << 56 |
-           ((((long) buffer[1 + offsetInt]) & 0xFFL) << 48) |
-           ((((long) buffer[2 + offsetInt]) & 0xFFL) << 40) |
-           ((((long) buffer[3 + offsetInt]) & 0xFFL) << 32) |
-           ((((long) buffer[4 + offsetInt]) & 0xFFL) << 24) |
-           ((((long) buffer[5 + offsetInt]) & 0xFFL) << 16) |
-           ((((long) buffer[6 + offsetInt]) & 0xFFL) << 8)  |
-           (((long) buffer[7 + offsetInt]) & 0xFFL);
+    long val = (((long)buffer[0 + offsetInt]) & 0xFFL) << 56 |
+        ((((long)buffer[1 + offsetInt]) & 0xFFL) << 48) |
+        ((((long)buffer[2 + offsetInt]) & 0xFFL) << 40) |
+        ((((long)buffer[3 + offsetInt]) & 0xFFL) << 32) |
+        ((((long)buffer[4 + offsetInt]) & 0xFFL) << 24) |
+        ((((long)buffer[5 + offsetInt]) & 0xFFL) << 16) |
+        ((((long)buffer[6 + offsetInt]) & 0xFFL) << 8) |
+        (((long)buffer[7 + offsetInt]) & 0xFFL);
 
     offset.add(Type.DOUBLE.getByteSize());
     return Double.longBitsToDouble(val);
@@ -992,21 +896,19 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeDouble(double valD,
-                                   byte[] buffer,
-                                   MutableInt offset)
+  public static void serializeDouble(double valD, byte[] buffer, MutableInt offset)
   {
     long val = Double.doubleToLongBits(valD);
 
     int offsetInt = offset.intValue();
-    buffer[0 + offsetInt] = (byte) ((val >> 56) & 0xFFL);
-    buffer[1 + offsetInt] = (byte) ((val >> 48) & 0xFFL);
-    buffer[2 + offsetInt] = (byte) ((val >> 40) & 0xFFL);
-    buffer[3 + offsetInt] = (byte) ((val >> 32) & 0xFFL);
-    buffer[4 + offsetInt] = (byte) ((val >> 24) & 0xFFL);
-    buffer[5 + offsetInt] = (byte) ((val >> 16) & 0xFFL);
-    buffer[6 + offsetInt] = (byte) ((val >> 8) & 0xFFL);
-    buffer[7 + offsetInt] = (byte) (val & 0xFFL);
+    buffer[0 + offsetInt] = (byte)((val >> 56) & 0xFFL);
+    buffer[1 + offsetInt] = (byte)((val >> 48) & 0xFFL);
+    buffer[2 + offsetInt] = (byte)((val >> 40) & 0xFFL);
+    buffer[3 + offsetInt] = (byte)((val >> 32) & 0xFFL);
+    buffer[4 + offsetInt] = (byte)((val >> 24) & 0xFFL);
+    buffer[5 + offsetInt] = (byte)((val >> 16) & 0xFFL);
+    buffer[6 + offsetInt] = (byte)((val >> 8) & 0xFFL);
+    buffer[7 + offsetInt] = (byte)(val & 0xFFL);
 
     offset.add(Type.DOUBLE.getByteSize());
   }
@@ -1016,14 +918,14 @@ public class GPOUtils
     byte[] buffer = new byte[Type.DOUBLE.getByteSize()];
     long val = Double.doubleToLongBits(valD);
 
-    buffer[0] = (byte) ((val >> 56) & 0xFFL);
-    buffer[1] = (byte) ((val >> 48) & 0xFFL);
-    buffer[2] = (byte) ((val >> 40) & 0xFFL);
-    buffer[3] = (byte) ((val >> 32) & 0xFFL);
-    buffer[4] = (byte) ((val >> 24) & 0xFFL);
-    buffer[5] = (byte) ((val >> 16) & 0xFFL);
-    buffer[6] = (byte) ((val >> 8) & 0xFFL);
-    buffer[7] = (byte) (val & 0xFFL);
+    buffer[0] = (byte)((val >> 56) & 0xFFL);
+    buffer[1] = (byte)((val >> 48) & 0xFFL);
+    buffer[2] = (byte)((val >> 40) & 0xFFL);
+    buffer[3] = (byte)((val >> 32) & 0xFFL);
+    buffer[4] = (byte)((val >> 24) & 0xFFL);
+    buffer[5] = (byte)((val >> 16) & 0xFFL);
+    buffer[6] = (byte)((val >> 8) & 0xFFL);
+    buffer[7] = (byte)(val & 0xFFL);
 
     return buffer;
   }
@@ -1035,14 +937,13 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized integer.
    */
-  public static int deserializeInt(byte[] buffer,
-                                   MutableInt offset)
+  public static int deserializeInt(byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    int val = ((((int) buffer[0 + offsetInt]) & 0xFF) << 24) |
-           ((((int) buffer[1 + offsetInt]) & 0xFF) << 16) |
-           ((((int) buffer[2 + offsetInt]) & 0xFF) << 8)  |
-           (((int) buffer[3 + offsetInt]) & 0xFF);
+    int val = ((((int)buffer[0 + offsetInt]) & 0xFF) << 24) |
+        ((((int)buffer[1 + offsetInt]) & 0xFF) << 16) |
+        ((((int)buffer[2 + offsetInt]) & 0xFF) << 8) |
+        (((int)buffer[3 + offsetInt]) & 0xFF);
 
     offset.add(Type.INTEGER.getByteSize());
     return val;
@@ -1066,15 +967,13 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeInt(int val,
-                                  byte[] buffer,
-                                  MutableInt offset)
+  public static void serializeInt(int val, byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    buffer[0 + offsetInt] = (byte) ((val >> 24) & 0xFF);
-    buffer[1 + offsetInt] = (byte) ((val >> 16) & 0xFF);
-    buffer[2 + offsetInt] = (byte) ((val >> 8) & 0xFF);
-    buffer[3 + offsetInt] = (byte) (val & 0xFF);
+    buffer[0 + offsetInt] = (byte)((val >> 24) & 0xFF);
+    buffer[1 + offsetInt] = (byte)((val >> 16) & 0xFF);
+    buffer[2 + offsetInt] = (byte)((val >> 8) & 0xFF);
+    buffer[3 + offsetInt] = (byte)(val & 0xFF);
 
     offset.add(Type.INTEGER.getByteSize());
   }
@@ -1084,13 +983,14 @@ public class GPOUtils
    * @param val The value to serialize.
    * @return The serialized integer value.
    */
-  public static byte[] serializeInt(int val) {
+  public static byte[] serializeInt(int val)
+  {
     byte[] buffer = new byte[Type.INTEGER.getByteSize()];
 
-    buffer[0] = (byte) ((val >> 24) & 0xFF);
-    buffer[1] = (byte) ((val >> 16) & 0xFF);
-    buffer[2] = (byte) ((val >> 8) & 0xFF);
-    buffer[3] = (byte) (val & 0xFF);
+    buffer[0] = (byte)((val >> 24) & 0xFF);
+    buffer[1] = (byte)((val >> 16) & 0xFF);
+    buffer[2] = (byte)((val >> 8) & 0xFF);
+    buffer[3] = (byte)(val & 0xFF);
 
     return buffer;
   }
@@ -1102,14 +1002,13 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized float.
    */
-  public static float deserializeFloat(byte[] buffer,
-                                   MutableInt offset)
+  public static float deserializeFloat(byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    int val = ((((int) buffer[0 + offsetInt]) & 0xFF) << 24) |
-           ((((int) buffer[1 + offsetInt]) & 0xFF) << 16) |
-           ((((int) buffer[2 + offsetInt]) & 0xFF) << 8)  |
-           (((int) buffer[3 + offsetInt]) & 0xFF);
+    int val = ((((int)buffer[0 + offsetInt]) & 0xFF) << 24) |
+        ((((int)buffer[1 + offsetInt]) & 0xFF) << 16) |
+        ((((int)buffer[2 + offsetInt]) & 0xFF) << 8) |
+        (((int)buffer[3 + offsetInt]) & 0xFF);
 
     offset.add(Type.FLOAT.getByteSize());
     return Float.intBitsToFloat(val);
@@ -1122,17 +1021,15 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeFloat(float valf,
-                                  byte[] buffer,
-                                  MutableInt offset)
+  public static void serializeFloat(float valf, byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
     int val = Float.floatToIntBits(valf);
 
-    buffer[0 + offsetInt] = (byte) ((val >> 24) & 0xFF);
-    buffer[1 + offsetInt] = (byte) ((val >> 16) & 0xFF);
-    buffer[2 + offsetInt] = (byte) ((val >> 8) & 0xFF);
-    buffer[3 + offsetInt] = (byte) (val & 0xFF);
+    buffer[0 + offsetInt] = (byte)((val >> 24) & 0xFF);
+    buffer[1 + offsetInt] = (byte)((val >> 16) & 0xFF);
+    buffer[2 + offsetInt] = (byte)((val >> 8) & 0xFF);
+    buffer[3 + offsetInt] = (byte)(val & 0xFF);
 
     offset.add(Type.FLOAT.getByteSize());
   }
@@ -1142,10 +1039,10 @@ public class GPOUtils
     byte[] buffer = new byte[Type.FLOAT.getByteSize()];
     int val = Float.floatToIntBits(valf);
 
-    buffer[0] = (byte) ((val >> 24) & 0xFF);
-    buffer[1] = (byte) ((val >> 16) & 0xFF);
-    buffer[2] = (byte) ((val >> 8) & 0xFF);
-    buffer[3] = (byte) (val & 0xFF);
+    buffer[0] = (byte)((val >> 24) & 0xFF);
+    buffer[1] = (byte)((val >> 16) & 0xFF);
+    buffer[2] = (byte)((val >> 8) & 0xFF);
+    buffer[3] = (byte)(val & 0xFF);
 
     return buffer;
   }
@@ -1157,12 +1054,11 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized short.
    */
-  public static short deserializeShort(byte[] buffer,
-                                       MutableInt offset)
+  public static short deserializeShort(byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    short val = (short) (((((int) buffer[0 + offsetInt]) & 0xFF) << 8)  |
-                (((int) buffer[1 + offsetInt]) & 0xFF));
+    short val = (short)(((((int)buffer[0 + offsetInt]) & 0xFF) << 8) |
+        (((int)buffer[1 + offsetInt]) & 0xFF));
 
     offset.add(Type.SHORT.getByteSize());
     return val;
@@ -1175,13 +1071,11 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeShort(short val,
-                                    byte[] buffer,
-                                    MutableInt offset)
+  public static void serializeShort(short val, byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    buffer[0 + offsetInt] = (byte) ((val >> 8) & 0xFF);
-    buffer[1 + offsetInt] = (byte) (val & 0xFF);
+    buffer[0 + offsetInt] = (byte)((val >> 8) & 0xFF);
+    buffer[1 + offsetInt] = (byte)(val & 0xFF);
 
     offset.add(Type.SHORT.getByteSize());
   }
@@ -1190,8 +1084,8 @@ public class GPOUtils
   {
     byte[] buffer = new byte[Type.SHORT.getByteSize()];
 
-    buffer[0] = (byte) ((val >> 8) & 0xFF);
-    buffer[1] = (byte) (val & 0xFF);
+    buffer[0] = (byte)((val >> 8) & 0xFF);
+    buffer[1] = (byte)(val & 0xFF);
 
     return buffer;
   }
@@ -1203,8 +1097,7 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized byte.
    */
-  public static byte deserializeByte(byte[] buffer,
-                                     MutableInt offset)
+  public static byte deserializeByte(byte[] buffer, MutableInt offset)
   {
     byte val = buffer[offset.intValue()];
 
@@ -1220,9 +1113,7 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeByte(byte val,
-                                   byte[] buffer,
-                                   MutableInt offset)
+  public static void serializeByte(byte val, byte[] buffer, MutableInt offset)
   {
     buffer[offset.intValue()] = val;
 
@@ -1241,8 +1132,7 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized boolean.
    */
-  public static boolean deserializeBoolean(byte[] buffer,
-                                           MutableInt offset)
+  public static boolean deserializeBoolean(byte[] buffer, MutableInt offset)
   {
     boolean val = buffer[offset.intValue()] != 0;
 
@@ -1257,18 +1147,16 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeBoolean(boolean val,
-                                      byte[] buffer,
-                                      MutableInt offset)
+  public static void serializeBoolean(boolean val, byte[] buffer, MutableInt offset)
   {
-    buffer[offset.intValue()] = (byte) (val ? 1: 0);
+    buffer[offset.intValue()] = (byte)(val ? 1 : 0);
 
     offset.add(Type.BOOLEAN.getByteSize());
   }
 
   public static byte[] serializeBoolean(boolean val)
   {
-    return new byte[]{(byte) (val ? 1: 0)};
+    return new byte[]{(byte)(val ? 1 : 0)};
   }
 
   /**
@@ -1278,12 +1166,11 @@ public class GPOUtils
    * @param offset The offset to deserialize from.
    * @return The deserialized character.
    */
-  public static char deserializeChar(byte[] buffer,
-                                     MutableInt offset)
+  public static char deserializeChar(byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    char val = (char) (((((int) buffer[0 + offsetInt]) & 0xFF) << 8)  |
-                (((int) buffer[1 + offsetInt]) & 0xFF));
+    char val = (char)(((((int)buffer[0 + offsetInt]) & 0xFF) << 8) |
+        (((int)buffer[1 + offsetInt]) & 0xFF));
 
     offset.add(Type.CHAR.getByteSize());
     return val;
@@ -1296,13 +1183,11 @@ public class GPOUtils
    * @param buffer The byte buffer to serialize to.
    * @param offset The offset in the buffer to serialize to and also to increment appropriately.
    */
-  public static void serializeChar(char val,
-                                   byte[] buffer,
-                                   MutableInt offset)
+  public static void serializeChar(char val, byte[] buffer, MutableInt offset)
   {
     int offsetInt = offset.intValue();
-    buffer[0 + offsetInt] = (byte) ((val >> 8) & 0xFF);
-    buffer[1 + offsetInt] = (byte) (val & 0xFF);
+    buffer[0 + offsetInt] = (byte)((val >> 8) & 0xFF);
+    buffer[1 + offsetInt] = (byte)(val & 0xFF);
 
     offset.add(Type.CHAR.getByteSize());
   }
@@ -1311,8 +1196,8 @@ public class GPOUtils
   {
     byte[] buffer = new byte[Type.CHAR.getByteSize()];
 
-    buffer[0] = (byte) ((val >> 8) & 0xFF);
-    buffer[1] = (byte) (val & 0xFF);
+    buffer[0] = (byte)((val >> 8) & 0xFF);
+    buffer[1] = (byte)(val & 0xFF);
 
     return buffer;
   }
@@ -1330,20 +1215,15 @@ public class GPOUtils
    * @return An array of boolean getters for given fields.
    */
   @SuppressWarnings("unchecked")
-  public static <T> T[] createGetters(List<String> fields,
-                                      Map<String, String> valueToExpression,
-                                      Class<?> clazz,
-                                      Class<?> getterClazz,
-                                      Class<?> getterMethodClazz)
+  public static <T> T[] createGetters(List<String> fields, Map<String, String> valueToExpression,
+      Class<?> clazz, Class<?> getterClazz, Class<?> getterMethodClazz)
   {
     @SuppressWarnings("unchecked")
     T[] getters = (T[])Array.newInstance(getterMethodClazz, fields.size());
 
-    for(int getterIndex = 0;
-        getterIndex < fields.size();
-        getterIndex++) {
+    for (int getterIndex = 0; getterIndex < fields.size(); getterIndex++) {
       String field = fields.get(getterIndex);
-      getters[getterIndex] = (T) PojoUtils.constructGetter(clazz, valueToExpression.get(field), getterClazz);
+      getters[getterIndex] = (T)PojoUtils.constructGetter(clazz, valueToExpression.get(field), getterClazz);
     }
 
     return getters;
@@ -1359,16 +1239,13 @@ public class GPOUtils
    * @param clazz The Class of the POJO to extract values from.
    * @return An array of boolean getters for given fields.
    */
-  public static Getter<Object, String>[] createGettersString(List<String> fields,
-                                                             Map<String, String> valueToExpression,
-                                                             Class<?> clazz)
+  public static Getter<Object, String>[] createGettersString(List<String> fields, Map<String, String> valueToExpression,
+      Class<?> clazz)
   {
     @SuppressWarnings({"unchecked","rawtypes"})
     Getter<Object, String>[] getters = new Getter[fields.size()];
 
-    for(int getterIndex = 0;
-        getterIndex < fields.size();
-        getterIndex++) {
+    for (int getterIndex = 0; getterIndex < fields.size(); getterIndex++) {
       String field = fields.get(getterIndex);
       getters[getterIndex] = PojoUtils.createGetter(clazz, valueToExpression.get(field), String.class);
     }
@@ -1386,16 +1263,13 @@ public class GPOUtils
    * @param clazz The Class of the POJO to extract values from.
    * @return An array of boolean getters for given fields.
    */
-  public static Getter<Object, Object>[] createGettersObject(List<String> fields,
-                                                             Map<String, String> valueToExpression,
-                                                             Class<?> clazz)
+  public static Getter<Object, Object>[] createGettersObject(List<String> fields, Map<String, String> valueToExpression,
+      Class<?> clazz)
   {
     @SuppressWarnings({"unchecked","rawtypes"})
     Getter<Object, Object>[] getters = new Getter[fields.size()];
 
-    for(int getterIndex = 0;
-        getterIndex < fields.size();
-        getterIndex++) {
+    for (int getterIndex = 0; getterIndex < fields.size(); getterIndex++) {
       String field = fields.get(getterIndex);
       getters[getterIndex] = PojoUtils.createGetter(clazz, valueToExpression.get(field), Object.class);
     }
@@ -1413,14 +1287,13 @@ public class GPOUtils
    * @return The {@link GPOGetters} object which can be used to convert POJOs into {@link GPOMutable} objects initialized
    * with the same {@link FieldsDescriptor} object.
    */
-  public static GPOGetters buildGPOGetters(Map<String, String> fieldToGetter,
-                                           FieldsDescriptor fieldsDescriptor,
-                                           Class<?> clazz)
+  public static GPOGetters buildGPOGetters(Map<String, String> fieldToGetter, FieldsDescriptor fieldsDescriptor,
+      Class<?> clazz)
   {
     GPOGetters gpoGetters = new GPOGetters();
     Map<Type, List<String>> typeToFields = fieldsDescriptor.getTypeToFields();
 
-    for(Map.Entry<Type, List<String>> entry: typeToFields.entrySet()) {
+    for (Map.Entry<Type, List<String>> entry : typeToFields.entrySet()) {
       Type inputType = entry.getKey();
       GPOType gpoType = GPOType.GPO_TYPE_ARRAY[inputType.ordinal()];
       List<String> fields = entry.getValue();
@@ -1446,10 +1319,8 @@ public class GPOUtils
       boolean[] tempBools = mutable.getFieldsBoolean();
       GetterBoolean<Object>[] tempGetterBools = getters.gettersBoolean;
 
-      if(tempBools != null) {
-        for(int index = 0;
-            index < tempBools.length;
-            index++) {
+      if (tempBools != null) {
+        for (int index = 0; index < tempBools.length; index++) {
           tempBools[index] = tempGetterBools[index].get(object);
         }
       }
@@ -1459,10 +1330,8 @@ public class GPOUtils
       byte[] tempBytes = mutable.getFieldsByte();
       GetterByte<Object>[] tempGetterByte = getters.gettersByte;
 
-      if(tempBytes != null) {
-        for(int index = 0;
-            index < tempBytes.length;
-            index++) {
+      if (tempBytes != null) {
+        for (int index = 0; index < tempBytes.length; index++) {
           tempBytes[index] = tempGetterByte[index].get(object);
         }
       }
@@ -1472,10 +1341,8 @@ public class GPOUtils
       char[] tempChar = mutable.getFieldsCharacter();
       GetterChar<Object>[] tempGetterChar = getters.gettersChar;
 
-      if(tempChar != null) {
-        for(int index = 0;
-            index < tempChar.length;
-            index++) {
+      if (tempChar != null) {
+        for (int index = 0; index < tempChar.length; index++) {
           tempChar[index] = tempGetterChar[index].get(object);
         }
       }
@@ -1485,10 +1352,8 @@ public class GPOUtils
       double[] tempDouble = mutable.getFieldsDouble();
       GetterDouble<Object>[] tempGetterDouble = getters.gettersDouble;
 
-      if(tempDouble != null) {
-        for(int index = 0;
-            index < tempDouble.length;
-            index++) {
+      if (tempDouble != null) {
+        for (int index = 0; index < tempDouble.length; index++) {
           tempDouble[index] = tempGetterDouble[index].get(object);
         }
       }
@@ -1498,10 +1363,8 @@ public class GPOUtils
       float[] tempFloat = mutable.getFieldsFloat();
       GetterFloat<Object>[] tempGetterFloat = getters.gettersFloat;
 
-      if(tempFloat != null) {
-        for(int index = 0;
-            index < tempFloat.length;
-            index++) {
+      if (tempFloat != null) {
+        for (int index = 0; index < tempFloat.length; index++) {
           tempFloat[index] = tempGetterFloat[index].get(object);
         }
       }
@@ -1511,10 +1374,8 @@ public class GPOUtils
       int[] tempInt = mutable.getFieldsInteger();
       GetterInt<Object>[] tempGetterInt = getters.gettersInteger;
 
-      if(tempInt != null) {
-        for(int index = 0;
-            index < tempInt.length;
-            index++) {
+      if (tempInt != null) {
+        for (int index = 0; index < tempInt.length; index++) {
           tempInt[index] = tempGetterInt[index].get(object);
         }
       }
@@ -1524,10 +1385,8 @@ public class GPOUtils
       long[] tempLong = mutable.getFieldsLong();
       GetterLong<Object>[] tempGetterLong = getters.gettersLong;
 
-      if(tempLong != null) {
-        for(int index = 0;
-            index < tempLong.length;
-            index++) {
+      if (tempLong != null) {
+        for (int index = 0; index < tempLong.length; index++) {
           tempLong[index] = tempGetterLong[index].get(object);
         }
       }
@@ -1537,10 +1396,8 @@ public class GPOUtils
       short[] tempShort = mutable.getFieldsShort();
       GetterShort<Object>[] tempGetterShort = getters.gettersShort;
 
-      if(tempShort != null) {
-        for(int index = 0;
-            index < tempShort.length;
-            index++) {
+      if (tempShort != null) {
+        for (int index = 0; index < tempShort.length; index++) {
           tempShort[index] = tempGetterShort[index].get(object);
         }
       }
@@ -1550,29 +1407,23 @@ public class GPOUtils
       String[] tempString = mutable.getFieldsString();
       Getter<Object, String>[] tempGetterString = getters.gettersString;
 
-      if(tempString != null) {
-        for(int index = 0;
-            index < tempString.length;
-            index++) {
+      if (tempString != null) {
+        for (int index = 0; index < tempString.length; index++) {
           tempString[index] = tempGetterString[index].get(object);
         }
       }
     }
   }
 
-  public static void indirectCopy(GPOMutable dest,
-                                  GPOMutable src,
-                                  IndexSubset indexSubset)
+  public static void indirectCopy(GPOMutable dest, GPOMutable src, IndexSubset indexSubset)
   {
     {
       String[] destString = dest.getFieldsString();
       String[] srcString = src.getFieldsString();
       int[] srcIndex = indexSubset.fieldsStringIndexSubset;
-      if(destString != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destString != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destString[index] = srcString[srcIndex[index]];
@@ -1584,11 +1435,9 @@ public class GPOUtils
       boolean[] destBoolean = dest.getFieldsBoolean();
       boolean[] srcBoolean = src.getFieldsBoolean();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(destBoolean != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destBoolean != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destBoolean[index] = srcBoolean[srcIndex[index]];
@@ -1600,11 +1449,9 @@ public class GPOUtils
       char[] destChar = dest.getFieldsCharacter();
       char[] srcChar = src.getFieldsCharacter();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(destChar != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destChar != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destChar[index] = srcChar[srcIndex[index]];
@@ -1616,11 +1463,9 @@ public class GPOUtils
       byte[] destByte = dest.getFieldsByte();
       byte[] srcByte = src.getFieldsByte();
       int[] srcIndex = indexSubset.fieldsByteIndexSubset;
-      if(destByte != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destByte != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destByte[index] = srcByte[srcIndex[index]];
@@ -1632,11 +1477,9 @@ public class GPOUtils
       short[] destShort = dest.getFieldsShort();
       short[] srcShort = src.getFieldsShort();
       int[] srcIndex = indexSubset.fieldsShortIndexSubset;
-      if(destShort != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destShort != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destShort[index] = srcShort[srcIndex[index]];
@@ -1648,11 +1491,9 @@ public class GPOUtils
       int[] destInteger = dest.getFieldsInteger();
       int[] srcInteger = src.getFieldsInteger();
       int[] srcIndex = indexSubset.fieldsIntegerIndexSubset;
-      if(destInteger != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destInteger != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destInteger[index] = srcInteger[srcIndex[index]];
@@ -1664,11 +1505,9 @@ public class GPOUtils
       long[] destLong = dest.getFieldsLong();
       long[] srcLong = src.getFieldsLong();
       int[] srcIndex = indexSubset.fieldsLongIndexSubset;
-      if(destLong != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destLong != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
 
@@ -1681,11 +1520,9 @@ public class GPOUtils
       float[] destFloat = dest.getFieldsFloat();
       float[] srcFloat = src.getFieldsFloat();
       int[] srcIndex = indexSubset.fieldsFloatIndexSubset;
-      if(destFloat != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destFloat != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destFloat[index] = srcFloat[srcIndex[index]];
@@ -1697,11 +1534,9 @@ public class GPOUtils
       double[] destDouble = dest.getFieldsDouble();
       double[] srcDouble = src.getFieldsDouble();
       int[] srcIndex = indexSubset.fieldsDoubleIndexSubset;
-      if(destDouble != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destDouble != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destDouble[index] = srcDouble[srcIndex[index]];
@@ -1713,11 +1548,9 @@ public class GPOUtils
       Object[] destObject = dest.getFieldsObject();
       Object[] srcObject = src.getFieldsObject();
       int[] srcIndex = indexSubset.fieldsObjectIndexSubset;
-      if(destObject != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destObject != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           destObject[index] = srcObject[srcIndex[index]];
@@ -1732,10 +1565,8 @@ public class GPOUtils
 
     {
       String[] stringArray = gpo.getFieldsString();
-      if(stringArray != null) {
-        for(int index = 0;
-            index < stringArray.length;
-            index++) {
+      if (stringArray != null) {
+        for (int index = 0; index < stringArray.length; index++) {
           hashCode ^= stringArray[index].hashCode();
         }
       }
@@ -1743,21 +1574,17 @@ public class GPOUtils
 
     {
       boolean[] booleanArray = gpo.getFieldsBoolean();
-      if(booleanArray != null) {
-        for(int index = 0;
-            index < booleanArray.length;
-            index++) {
-          hashCode ^= booleanArray[index] ? 1: 0;
+      if (booleanArray != null) {
+        for (int index = 0; index < booleanArray.length; index++) {
+          hashCode ^= booleanArray[index] ? 1 : 0;
         }
       }
     }
 
     {
       char[] charArray = gpo.getFieldsCharacter();
-      if(charArray != null) {
-        for(int index = 0;
-            index < charArray.length;
-            index++) {
+      if (charArray != null) {
+        for (int index = 0; index < charArray.length; index++) {
           hashCode ^= Character.getNumericValue(charArray[index]);
         }
       }
@@ -1765,10 +1592,8 @@ public class GPOUtils
 
     {
       byte[] byteArray = gpo.getFieldsByte();
-      if(byteArray != null) {
-        for(int index = 0;
-            index < byteArray.length;
-            index++) {
+      if (byteArray != null) {
+        for (int index = 0; index < byteArray.length; index++) {
           hashCode ^= byteArray[index];
         }
       }
@@ -1776,10 +1601,8 @@ public class GPOUtils
 
     {
       short[] shortArray = gpo.getFieldsShort();
-      if(shortArray != null) {
-        for(int index = 0;
-            index < shortArray.length;
-            index++) {
+      if (shortArray != null) {
+        for (int index = 0; index < shortArray.length; index++) {
           hashCode ^= shortArray[index];
         }
       }
@@ -1787,10 +1610,8 @@ public class GPOUtils
 
     {
       int[] integerArray = gpo.getFieldsInteger();
-      if(integerArray != null) {
-        for(int index = 0;
-            index < integerArray.length;
-            index++) {
+      if (integerArray != null) {
+        for (int index = 0; index < integerArray.length; index++) {
           hashCode ^= integerArray[index];
         }
       }
@@ -1798,10 +1619,8 @@ public class GPOUtils
 
     {
       long[] longArray = gpo.getFieldsLong();
-      if(longArray != null) {
-        for(int index = 0;
-            index < longArray.length;
-            index++) {
+      if (longArray != null) {
+        for (int index = 0; index < longArray.length; index++) {
           hashCode ^= longArray[index];
         }
       }
@@ -1809,10 +1628,8 @@ public class GPOUtils
 
     {
       float[] floatArray = gpo.getFieldsFloat();
-      if(floatArray != null) {
-        for(int index = 0;
-            index < floatArray.length;
-            index++) {
+      if (floatArray != null) {
+        for (int index = 0; index < floatArray.length; index++) {
           hashCode ^= Float.floatToIntBits(floatArray[index]);
         }
       }
@@ -1820,10 +1637,8 @@ public class GPOUtils
 
     {
       double[] doubleArray = gpo.getFieldsDouble();
-      if(doubleArray != null) {
-        for(int index = 0;
-            index < doubleArray.length;
-            index++) {
+      if (doubleArray != null) {
+        for (int index = 0; index < doubleArray.length; index++) {
           hashCode ^= Double.doubleToLongBits(doubleArray[index]);
         }
       }
@@ -1831,10 +1646,8 @@ public class GPOUtils
 
     {
       Object[] objectArray = gpo.getFieldsObject();
-      if(objectArray != null) {
-        for(int index = 0;
-            index < objectArray.length;
-            index++) {
+      if (objectArray != null) {
+        for (int index = 0; index < objectArray.length; index++) {
           hashCode ^= objectArray[index].hashCode();
         }
       }
@@ -1857,8 +1670,7 @@ public class GPOUtils
    * @param indexSubset The subset of the {@link GPOMutable} used to compute the hashcode.
    * @return The hashcode for the given {@link GPOMutable} computed from the specified subset of its data.
    */
-  public static int indirectHashcode(GPOMutable gpo,
-                                     IndexSubset indexSubset)
+  public static int indirectHashcode(GPOMutable gpo, IndexSubset indexSubset)
   {
     int hashCode = 7;
     final int hashMultiplier = 23;
@@ -1866,11 +1678,9 @@ public class GPOUtils
     {
       String[] stringArray = gpo.getFieldsString();
       int[] srcIndex = indexSubset.fieldsStringIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           hashCode = hashMultiplier * hashCode + stringArray[srcIndex[index]].hashCode();
@@ -1881,14 +1691,12 @@ public class GPOUtils
     {
       boolean[] booleanArray = gpo.getFieldsBoolean();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          hashCode = hashMultiplier * hashCode + (booleanArray[srcIndex[index]] ? 1: 0);
+          hashCode = hashMultiplier * hashCode + (booleanArray[srcIndex[index]] ? 1 : 0);
         }
       }
     }
@@ -1896,11 +1704,9 @@ public class GPOUtils
     {
       char[] charArray = gpo.getFieldsCharacter();
       int[] srcIndex = indexSubset.fieldsCharacterIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           hashCode = hashMultiplier * hashCode + Character.getNumericValue(charArray[srcIndex[index]]);
@@ -1911,11 +1717,9 @@ public class GPOUtils
     {
       byte[] byteArray = gpo.getFieldsByte();
       int[] srcIndex = indexSubset.fieldsByteIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           hashCode = hashMultiplier * hashCode + byteArray[srcIndex[index]];
@@ -1926,11 +1730,9 @@ public class GPOUtils
     {
       short[] shortArray = gpo.getFieldsShort();
       int[] srcIndex = indexSubset.fieldsShortIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           hashCode = hashMultiplier * hashCode + shortArray[srcIndex[index]];
@@ -1941,11 +1743,9 @@ public class GPOUtils
     {
       int[] integerArray = gpo.getFieldsInteger();
       int[] srcIndex = indexSubset.fieldsIntegerIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           hashCode = hashMultiplier * hashCode + integerArray[srcIndex[index]];
@@ -1956,15 +1756,13 @@ public class GPOUtils
     {
       long[] longArray = gpo.getFieldsLong();
       int[] srcIndex = indexSubset.fieldsLongIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           long element = longArray[srcIndex[index]];
-          int elementHash = (int) (element ^ (element >>> 32));
+          int elementHash = (int)(element ^ (element >>> 32));
           hashCode = hashMultiplier * hashCode + elementHash;
         }
       }
@@ -1973,11 +1771,9 @@ public class GPOUtils
     {
       float[] floatArray = gpo.getFieldsFloat();
       int[] srcIndex = indexSubset.fieldsFloatIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           hashCode = hashMultiplier * hashCode + Float.floatToIntBits(floatArray[srcIndex[index]]);
@@ -1988,15 +1784,13 @@ public class GPOUtils
     {
       double[] doubleArray = gpo.getFieldsDouble();
       int[] srcIndex = indexSubset.fieldsDoubleIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
           long element = Double.doubleToLongBits(doubleArray[srcIndex[index]]);
-          int elementHash = (int) (element ^ (element >>> 32));
+          int elementHash = (int)(element ^ (element >>> 32));
           hashCode = hashMultiplier * hashCode + elementHash;
         }
       }
@@ -2005,11 +1799,9 @@ public class GPOUtils
     {
       Object[] objectArray = gpo.getFieldsObject();
       int[] srcIndex = indexSubset.fieldsObjectIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
 
@@ -2021,17 +1813,14 @@ public class GPOUtils
     return hashCode;
   }
 
-  public static boolean equals(GPOMutable dest,
-                               GPOMutable src)
+  public static boolean equals(GPOMutable dest, GPOMutable src)
   {
     {
       String[] destString = dest.getFieldsString();
       String[] srcString = src.getFieldsString();
-      if(destString != null) {
-        for(int index = 0;
-            index < srcString.length;
-            index++) {
-          if(!destString[index].equals(srcString[index])) {
+      if (destString != null) {
+        for (int index = 0; index < srcString.length; index++) {
+          if (!destString[index].equals(srcString[index])) {
             return false;
           }
         }
@@ -2041,11 +1830,9 @@ public class GPOUtils
     {
       boolean[] destBoolean = dest.getFieldsBoolean();
       boolean[] srcBoolean = src.getFieldsBoolean();
-      if(destBoolean != null) {
-        for(int index = 0;
-            index < srcBoolean.length;
-            index++) {
-          if(destBoolean[index] != srcBoolean[index]) {
+      if (destBoolean != null) {
+        for (int index = 0; index < srcBoolean.length; index++) {
+          if (destBoolean[index] != srcBoolean[index]) {
             return false;
           }
         }
@@ -2055,11 +1842,9 @@ public class GPOUtils
     {
       char[] destChar = dest.getFieldsCharacter();
       char[] srcChar = src.getFieldsCharacter();
-      if(destChar != null) {
-        for(int index = 0;
-            index < srcChar.length;
-            index++) {
-          if(destChar[index] != srcChar[index]) {
+      if (destChar != null) {
+        for (int index = 0; index < srcChar.length; index++) {
+          if (destChar[index] != srcChar[index]) {
             return false;
           }
         }
@@ -2069,11 +1854,9 @@ public class GPOUtils
     {
       byte[] destByte = dest.getFieldsByte();
       byte[] srcByte = src.getFieldsByte();
-      if(destByte != null) {
-        for(int index = 0;
-            index < srcByte.length;
-            index++) {
-          if(destByte[index] != srcByte[index]) {
+      if (destByte != null) {
+        for (int index = 0; index < srcByte.length; index++) {
+          if (destByte[index] != srcByte[index]) {
             return false;
           }
         }
@@ -2083,11 +1866,9 @@ public class GPOUtils
     {
       short[] destShort = dest.getFieldsShort();
       short[] srcShort = src.getFieldsShort();
-      if(destShort != null) {
-        for(int index = 0;
-            index < srcShort.length;
-            index++) {
-          if(destShort[index] != srcShort[index]) {
+      if (destShort != null) {
+        for (int index = 0; index < srcShort.length; index++) {
+          if (destShort[index] != srcShort[index]) {
             return false;
           }
         }
@@ -2097,11 +1878,9 @@ public class GPOUtils
     {
       int[] destInteger = dest.getFieldsInteger();
       int[] srcInteger = src.getFieldsInteger();
-      if(destInteger != null) {
-        for(int index = 0;
-            index < srcInteger.length;
-            index++) {
-          if(destInteger[index] != srcInteger[index]) {
+      if (destInteger != null) {
+        for (int index = 0; index < srcInteger.length; index++) {
+          if (destInteger[index] != srcInteger[index]) {
             return false;
           }
         }
@@ -2111,11 +1890,9 @@ public class GPOUtils
     {
       long[] destLong = dest.getFieldsLong();
       long[] srcLong = src.getFieldsLong();
-      if(destLong != null) {
-        for(int index = 0;
-            index < srcLong.length;
-            index++) {
-          if(destLong[index] != srcLong[index]) {
+      if (destLong != null) {
+        for (int index = 0; index < srcLong.length; index++) {
+          if (destLong[index] != srcLong[index]) {
             return false;
           }
         }
@@ -2125,11 +1902,9 @@ public class GPOUtils
     {
       float[] destFloat = dest.getFieldsFloat();
       float[] srcFloat = src.getFieldsFloat();
-      if(destFloat != null) {
-        for(int index = 0;
-            index < srcFloat.length;
-            index++) {
-          if(destFloat[index] != srcFloat[index]) {
+      if (destFloat != null) {
+        for (int index = 0; index < srcFloat.length; index++) {
+          if (destFloat[index] != srcFloat[index]) {
             return false;
           }
         }
@@ -2139,11 +1914,9 @@ public class GPOUtils
     {
       double[] destDouble = dest.getFieldsDouble();
       double[] srcDouble = src.getFieldsDouble();
-      if(destDouble != null) {
-        for(int index = 0;
-            index < srcDouble.length;
-            index++) {
-          if(destDouble[index] != srcDouble[index]) {
+      if (destDouble != null) {
+        for (int index = 0; index < srcDouble.length; index++) {
+          if (destDouble[index] != srcDouble[index]) {
             return false;
           }
         }
@@ -2153,11 +1926,9 @@ public class GPOUtils
     {
       Object[] destObject = dest.getFieldsObject();
       Object[] srcObject = src.getFieldsObject();
-      if(destObject != null) {
-        for(int index = 0;
-            index < srcObject.length;
-            index++) {
-          if(!destObject[index].equals(srcObject[index])) {
+      if (destObject != null) {
+        for (int index = 0; index < srcObject.length; index++) {
+          if (!destObject[index].equals(srcObject[index])) {
             return false;
           }
         }
@@ -2167,22 +1938,18 @@ public class GPOUtils
     return true;
   }
 
-  public static boolean subsetEquals(GPOMutable dest,
-                                     GPOMutable src,
-                                     IndexSubset indexSubset)
+  public static boolean subsetEquals(GPOMutable dest, GPOMutable src, IndexSubset indexSubset)
   {
     {
       String[] destString = dest.getFieldsString();
       String[] srcString = src.getFieldsString();
       int[] srcIndex = indexSubset.fieldsStringIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(!destString[srcIndex[index]].equals(srcString[srcIndex[index]])) {
+          if (!destString[srcIndex[index]].equals(srcString[srcIndex[index]])) {
             return false;
           }
         }
@@ -2193,14 +1960,12 @@ public class GPOUtils
       boolean[] destBoolean = dest.getFieldsBoolean();
       boolean[] srcBoolean = src.getFieldsBoolean();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destBoolean[srcIndex[index]] != srcBoolean[srcIndex[index]]) {
+          if (destBoolean[srcIndex[index]] != srcBoolean[srcIndex[index]]) {
             return false;
           }
         }
@@ -2211,14 +1976,12 @@ public class GPOUtils
       char[] destChar = dest.getFieldsCharacter();
       char[] srcChar = src.getFieldsCharacter();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destChar[srcIndex[index]] != srcChar[srcIndex[index]]) {
+          if (destChar[srcIndex[index]] != srcChar[srcIndex[index]]) {
             return false;
           }
         }
@@ -2229,14 +1992,12 @@ public class GPOUtils
       byte[] destByte = dest.getFieldsByte();
       byte[] srcByte = src.getFieldsByte();
       int[] srcIndex = indexSubset.fieldsByteIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destByte[srcIndex[index]] != srcByte[srcIndex[index]]) {
+          if (destByte[srcIndex[index]] != srcByte[srcIndex[index]]) {
             return false;
           }
         }
@@ -2247,14 +2008,12 @@ public class GPOUtils
       short[] destShort = dest.getFieldsShort();
       short[] srcShort = src.getFieldsShort();
       int[] srcIndex = indexSubset.fieldsShortIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destShort[srcIndex[index]] != srcShort[srcIndex[index]]) {
+          if (destShort[srcIndex[index]] != srcShort[srcIndex[index]]) {
             return false;
           }
         }
@@ -2265,14 +2024,12 @@ public class GPOUtils
       int[] destInteger = dest.getFieldsInteger();
       int[] srcInteger = src.getFieldsInteger();
       int[] srcIndex = indexSubset.fieldsIntegerIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destInteger[srcIndex[index]] != srcInteger[srcIndex[index]]) {
+          if (destInteger[srcIndex[index]] != srcInteger[srcIndex[index]]) {
             return false;
           }
         }
@@ -2283,14 +2040,12 @@ public class GPOUtils
       long[] destLong = dest.getFieldsLong();
       long[] srcLong = src.getFieldsLong();
       int[] srcIndex = indexSubset.fieldsLongIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destLong[srcIndex[index]] != srcLong[srcIndex[index]]) {
+          if (destLong[srcIndex[index]] != srcLong[srcIndex[index]]) {
             return false;
           }
         }
@@ -2301,14 +2056,12 @@ public class GPOUtils
       float[] destFloat = dest.getFieldsFloat();
       float[] srcFloat = src.getFieldsFloat();
       int[] srcIndex = indexSubset.fieldsFloatIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destFloat[srcIndex[index]] != srcFloat[srcIndex[index]]) {
+          if (destFloat[srcIndex[index]] != srcFloat[srcIndex[index]]) {
             return false;
           }
         }
@@ -2319,14 +2072,12 @@ public class GPOUtils
       double[] destDouble = dest.getFieldsDouble();
       double[] srcDouble = src.getFieldsDouble();
       int[] srcIndex = indexSubset.fieldsDoubleIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destDouble[srcIndex[index]] != srcDouble[srcIndex[index]]) {
+          if (destDouble[srcIndex[index]] != srcDouble[srcIndex[index]]) {
             return false;
           }
         }
@@ -2337,14 +2088,12 @@ public class GPOUtils
       Object[] destObject = dest.getFieldsObject();
       Object[] srcObject = src.getFieldsObject();
       int[] srcIndex = indexSubset.fieldsObjectIndexSubset;
-      if(srcIndex != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (srcIndex != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(!destObject[srcIndex[index]].equals(srcObject[srcIndex[index]])) {
+          if (!destObject[srcIndex[index]].equals(srcObject[srcIndex[index]])) {
             return false;
           }
         }
@@ -2354,22 +2103,18 @@ public class GPOUtils
     return true;
   }
 
-  public static boolean indirectEquals(GPOMutable dest,
-                                       GPOMutable src,
-                                       IndexSubset indexSubset)
+  public static boolean indirectEquals(GPOMutable dest, GPOMutable src, IndexSubset indexSubset)
   {
     {
       String[] destString = dest.getFieldsString();
       String[] srcString = src.getFieldsString();
       int[] srcIndex = indexSubset.fieldsStringIndexSubset;
-      if(destString != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destString != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(!destString[index].equals(srcString[srcIndex[index]])) {
+          if (!destString[index].equals(srcString[srcIndex[index]])) {
             return false;
           }
         }
@@ -2380,14 +2125,12 @@ public class GPOUtils
       boolean[] destBoolean = dest.getFieldsBoolean();
       boolean[] srcBoolean = src.getFieldsBoolean();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(destBoolean != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destBoolean != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destBoolean[index] != srcBoolean[srcIndex[index]]) {
+          if (destBoolean[index] != srcBoolean[srcIndex[index]]) {
             return false;
           }
         }
@@ -2398,14 +2141,12 @@ public class GPOUtils
       char[] destChar = dest.getFieldsCharacter();
       char[] srcChar = src.getFieldsCharacter();
       int[] srcIndex = indexSubset.fieldsBooleanIndexSubset;
-      if(destChar != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destChar != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destChar[index] != srcChar[srcIndex[index]]) {
+          if (destChar[index] != srcChar[srcIndex[index]]) {
             return false;
           }
         }
@@ -2416,14 +2157,12 @@ public class GPOUtils
       byte[] destByte = dest.getFieldsByte();
       byte[] srcByte = src.getFieldsByte();
       int[] srcIndex = indexSubset.fieldsByteIndexSubset;
-      if(destByte != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destByte != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destByte[index] != srcByte[srcIndex[index]]) {
+          if (destByte[index] != srcByte[srcIndex[index]]) {
             return false;
           }
         }
@@ -2434,14 +2173,12 @@ public class GPOUtils
       short[] destShort = dest.getFieldsShort();
       short[] srcShort = src.getFieldsShort();
       int[] srcIndex = indexSubset.fieldsShortIndexSubset;
-      if(destShort != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destShort != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destShort[index] != srcShort[srcIndex[index]]) {
+          if (destShort[index] != srcShort[srcIndex[index]]) {
             return false;
           }
         }
@@ -2452,14 +2189,12 @@ public class GPOUtils
       int[] destInteger = dest.getFieldsInteger();
       int[] srcInteger = src.getFieldsInteger();
       int[] srcIndex = indexSubset.fieldsIntegerIndexSubset;
-      if(destInteger != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destInteger != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destInteger[index] != srcInteger[srcIndex[index]]) {
+          if (destInteger[index] != srcInteger[srcIndex[index]]) {
             return false;
           }
         }
@@ -2470,14 +2205,12 @@ public class GPOUtils
       long[] destLong = dest.getFieldsLong();
       long[] srcLong = src.getFieldsLong();
       int[] srcIndex = indexSubset.fieldsLongIndexSubset;
-      if(destLong != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destLong != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destLong[index] != srcLong[srcIndex[index]]) {
+          if (destLong[index] != srcLong[srcIndex[index]]) {
             return false;
           }
         }
@@ -2488,14 +2221,12 @@ public class GPOUtils
       float[] destFloat = dest.getFieldsFloat();
       float[] srcFloat = src.getFieldsFloat();
       int[] srcIndex = indexSubset.fieldsFloatIndexSubset;
-      if(destFloat != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destFloat != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destFloat[index] != srcFloat[srcIndex[index]]) {
+          if (destFloat[index] != srcFloat[srcIndex[index]]) {
             return false;
           }
         }
@@ -2506,14 +2237,12 @@ public class GPOUtils
       double[] destDouble = dest.getFieldsDouble();
       double[] srcDouble = src.getFieldsDouble();
       int[] srcIndex = indexSubset.fieldsDoubleIndexSubset;
-      if(destDouble != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destDouble != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(destDouble[index] != srcDouble[srcIndex[index]]) {
+          if (destDouble[index] != srcDouble[srcIndex[index]]) {
             return false;
           }
         }
@@ -2524,14 +2253,12 @@ public class GPOUtils
       Object[] destObject = dest.getFieldsObject();
       Object[] srcObject = src.getFieldsObject();
       int[] srcIndex = indexSubset.fieldsObjectIndexSubset;
-      if(destObject != null) {
-        for(int index = 0;
-            index < srcIndex.length;
-            index++) {
-          if(srcIndex[index] == -1) {
+      if (destObject != null) {
+        for (int index = 0; index < srcIndex.length; index++) {
+          if (srcIndex[index] == -1) {
             continue;
           }
-          if(!destObject[index].equals(srcObject[srcIndex[index]])) {
+          if (!destObject[index].equals(srcObject[srcIndex[index]])) {
             return false;
           }
         }
@@ -2543,63 +2270,58 @@ public class GPOUtils
 
   public static void zeroFillNumeric(GPOMutable value)
   {
-    if(value.getFieldsByte() != null) {
-      Arrays.fill(value.getFieldsByte(), (byte) 0);
+    if (value.getFieldsByte() != null) {
+      Arrays.fill(value.getFieldsByte(), (byte)0);
     }
 
-    if(value.getFieldsShort() != null) {
-      Arrays.fill(value.getFieldsShort(), (short) 0);
+    if (value.getFieldsShort() != null) {
+      Arrays.fill(value.getFieldsShort(), (short)0);
     }
 
-    if(value.getFieldsInteger() != null) {
+    if (value.getFieldsInteger() != null) {
       Arrays.fill(value.getFieldsInteger(), 0);
     }
 
-    if(value.getFieldsLong() != null) {
+    if (value.getFieldsLong() != null) {
       Arrays.fill(value.getFieldsLong(), 0L);
     }
 
-    if(value.getFieldsFloat() != null) {
+    if (value.getFieldsFloat() != null) {
       Arrays.fill(value.getFieldsFloat(), 0.0f);
     }
 
-    if(value.getFieldsDouble() != null) {
+    if (value.getFieldsDouble() != null) {
       Arrays.fill(value.getFieldsDouble(), 0.0);
     }
   }
 
-  public static IndexSubset computeSubIndices(FieldsDescriptor child,
-                                              FieldsDescriptor parent)
+  public static IndexSubset computeSubIndices(FieldsDescriptor child, FieldsDescriptor parent)
   {
     IndexSubset indexSubset = new IndexSubset();
 
-    for(Map.Entry<Type, List<String>> entry: child.getTypeToFields().entrySet()) {
+    for (Map.Entry<Type, List<String>> entry : child.getTypeToFields().entrySet()) {
       Type type = entry.getKey();
       List<String> childFields = entry.getValue();
       List<String> parentFields = parent.getTypeToFields().get(type);
 
       int size = child.getTypeToSize().get(type);
       int[] indices;
-      if(child.getTypeToFields().get(type) != null &&
-         child.getCompressedTypes().contains(type)) {
+      if (child.getTypeToFields().get(type) != null &&
+          child.getCompressedTypes().contains(type)) {
         indices = new int[1];
-      }
-      else {
+      } else {
         indices = new int[size];
 
-        for(int index = 0;
-            index < size;
-            index++) {
-          if(parentFields == null) {
+        for (int index = 0; index < size; index++) {
+          if (parentFields == null) {
             indices[index] = -1;
-          }
-          else {
+          } else {
             indices[index] = parentFields.indexOf(childFields.get(index));
           }
         }
       }
 
-      switch(type) {
+      switch (type) {
         case BOOLEAN: {
           indexSubset.fieldsBooleanIndexSubset = indices;
           break;
@@ -2702,24 +2424,27 @@ public class GPOUtils
     @Override
     public String toString()
     {
-      return "IndexSubset{" + "fieldsBooleanIndexSubset=" + fieldsBooleanIndexSubset + ", fieldsCharacterIndexSubset=" + fieldsCharacterIndexSubset + ", fieldsByteIndexSubset=" + fieldsByteIndexSubset + ", fieldsShortIndexSubset=" + fieldsShortIndexSubset + ", fieldsIntegerIndexSubset=" + fieldsIntegerIndexSubset + ", fieldsLongIndexSubset=" + fieldsLongIndexSubset + ", fieldsFloatIndexSubset=" + fieldsFloatIndexSubset + ", fieldsDoubleIndexSubset=" + fieldsDoubleIndexSubset + ", fieldsStringIndexSubset=" + fieldsStringIndexSubset + '}';
+      return "IndexSubset{" + "fieldsBooleanIndexSubset=" + fieldsBooleanIndexSubset + ", fieldsCharacterIndexSubset="
+          + fieldsCharacterIndexSubset + ", fieldsByteIndexSubset=" + fieldsByteIndexSubset
+          + ", fieldsShortIndexSubset=" + fieldsShortIndexSubset + ", fieldsIntegerIndexSubset="
+          + fieldsIntegerIndexSubset + ", fieldsLongIndexSubset=" + fieldsLongIndexSubset + ", fieldsFloatIndexSubset="
+          + fieldsFloatIndexSubset + ", fieldsDoubleIndexSubset=" + fieldsDoubleIndexSubset
+          + ", fieldsStringIndexSubset=" + fieldsStringIndexSubset + '}';
     }
   }
 
-  public static Map<String, Object> getDestringedData(FieldsDescriptor fd,
-                                                      Map<String, String> stringMap)
+  public static Map<String, Object> getDestringedData(FieldsDescriptor fd, Map<String, String> stringMap)
   {
     Map<String, Object> fieldToData = Maps.newHashMap();
     Map<String, Type> fieldToType = fd.getFieldToType();
 
-    for(Map.Entry<String, String> entry: stringMap.entrySet()) {
+    for (Map.Entry<String, String> entry : stringMap.entrySet()) {
       Object objValue;
       String valueString = entry.getValue();
       Type valueType = fieldToType.get(entry.getKey());
 
-      switch(valueType) {
-        case BOOLEAN:
-        {
+      switch (valueType) {
+        case BOOLEAN: {
           objValue = Boolean.valueOf(valueString);
           break;
         }
@@ -2774,8 +2499,8 @@ public class GPOUtils
   {
     Map<String, Object> values = Maps.newHashMap();
 
-    for(String field: fields.getFields()) {
-      if(!gpo.getFieldDescriptor().getFields().getFields().contains(field)) {
+    for (String field : fields.getFields()) {
+      if (!gpo.getFieldDescriptor().getFields().getFields().contains(field)) {
         continue;
       }
 
@@ -2792,8 +2517,9 @@ public class GPOUtils
    * @param val The value to check the range of.
    * @return True if the given int value is within the range of the specified type, false otherwise.
    */
-  public static boolean insideRange(Type type, int val) {
-    switch(type) {
+  public static boolean insideRange(Type type, int val)
+  {
+    switch (type) {
       case BYTE: {
         return !(val < (int)Byte.MIN_VALUE || val > (int)Byte.MAX_VALUE);
       }



[05/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/ApacheLogParseOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/ApacheLogParseOperatorTest.java b/library/src/test/java/com/datatorrent/lib/logs/ApacheLogParseOperatorTest.java
index c86eae3..7b44751 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/ApacheLogParseOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/ApacheLogParseOperatorTest.java
@@ -30,60 +30,60 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class ApacheLogParseOperatorTest
 {
-	private static Logger log = LoggerFactory
-			.getLogger(ApacheLogParseOperatorTest.class);
+  private static Logger log = LoggerFactory
+      .getLogger(ApacheLogParseOperatorTest.class);
 
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeProcessing()
-	{
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeProcessing()
+  {
 
-		ApacheLogParseOperator oper = new ApacheLogParseOperator();
-		CollectorTestSink ipSink = new CollectorTestSink();
-		CollectorTestSink urlSink = new CollectorTestSink();
-		CollectorTestSink scSink = new CollectorTestSink();
-		CollectorTestSink bytesSink = new CollectorTestSink();
-		CollectorTestSink refSink = new CollectorTestSink();
-		CollectorTestSink agentSink = new CollectorTestSink();
+    ApacheLogParseOperator oper = new ApacheLogParseOperator();
+    CollectorTestSink ipSink = new CollectorTestSink();
+    CollectorTestSink urlSink = new CollectorTestSink();
+    CollectorTestSink scSink = new CollectorTestSink();
+    CollectorTestSink bytesSink = new CollectorTestSink();
+    CollectorTestSink refSink = new CollectorTestSink();
+    CollectorTestSink agentSink = new CollectorTestSink();
 
-		oper.outputIPAddress.setSink(ipSink);
-		oper.outputUrl.setSink(urlSink);
-		oper.outputStatusCode.setSink(scSink);
-		oper.outputBytes.setSink(bytesSink);
-		oper.outputReferer.setSink(refSink);
-		oper.outputAgent.setSink(agentSink);
+    oper.outputIPAddress.setSink(ipSink);
+    oper.outputUrl.setSink(urlSink);
+    oper.outputStatusCode.setSink(scSink);
+    oper.outputBytes.setSink(bytesSink);
+    oper.outputReferer.setSink(refSink);
+    oper.outputAgent.setSink(agentSink);
 
-		String token = "127.0.0.1 - - [04/Apr/2013:17:17:21 -0700] \"GET /favicon.ico HTTP/1.1\" 404 498 \"-\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31\"";
-		oper.beginWindow(0);
-		oper.data.process(token);
-		oper.endWindow(); //
+    String token = "127.0.0.1 - - [04/Apr/2013:17:17:21 -0700] \"GET /favicon.ico HTTP/1.1\" 404 498 \"-\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31\"";
+    oper.beginWindow(0);
+    oper.data.process(token);
+    oper.endWindow(); //
 
-		Assert.assertEquals("number emitted tuples", 1,
-				ipSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				urlSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				scSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				bytesSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				refSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				agentSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        ipSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        urlSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        scSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        bytesSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        refSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        agentSink.collectedTuples.size());
 
-		log.debug(String.format("\nLine is \"%s\"", token));
-		log.debug(String.format("IP is %s\n", ipSink.collectedTuples.toString()));
-		log.debug(String.format("Url is %s\n", urlSink.collectedTuples.toString()));
-		log.debug(String.format("Status code is %s\n",
-				scSink.collectedTuples.toString()));
-		log.debug(String.format("Bytes are %s\n",
-				bytesSink.collectedTuples.toString()));
-		log.debug(String.format("Referer is %s\n",
-				refSink.collectedTuples.toString()));
-		log.debug(String.format("Agent is %s\n",
-				agentSink.collectedTuples.toString()));
-	}
+    log.debug(String.format("\nLine is \"%s\"", token));
+    log.debug(String.format("IP is %s\n", ipSink.collectedTuples.toString()));
+    log.debug(String.format("Url is %s\n", urlSink.collectedTuples.toString()));
+    log.debug(String.format("Status code is %s\n",
+        scSink.collectedTuples.toString()));
+    log.debug(String.format("Bytes are %s\n",
+        bytesSink.collectedTuples.toString()));
+    log.debug(String.format("Referer is %s\n",
+        refSink.collectedTuples.toString()));
+    log.debug(String.format("Agent is %s\n",
+        agentSink.collectedTuples.toString()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayListTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayListTest.java b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayListTest.java
index 10a80a4..1350218 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayListTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayListTest.java
@@ -50,7 +50,7 @@ public class FilteredLineToTokenArrayListTest
     oper.setSplitTokenBy(",");
     oper.tokens.setSink(tokenSink);
     oper.splittokens.setSink(stokenSink);
-    String [] filters = new String[2];
+    String[] filters = new String[2];
     filters[0] = "a";
     filters[1] = "c";
     oper.setFilterBy(filters);
@@ -73,9 +73,9 @@ public class FilteredLineToTokenArrayListTest
     Assert.assertEquals("number emitted tuples", numTuples, tokenSink.getCount("c,4,5,6"));
     Assert.assertEquals("number emitted tuples", numTuples, tokenSink.getCount("d"));
     HashMap<Object, Object> smap = stokenSink.map;
-    for (Map.Entry<Object, Object> e: smap.entrySet()) {
+    for (Map.Entry<Object, Object> e : smap.entrySet()) {
       HashMap<String, ArrayList<String>> item = (HashMap<String, ArrayList<String>>)e.getKey();
-      for (Map.Entry<String, ArrayList<String>> l: item.entrySet()) {
+      for (Map.Entry<String, ArrayList<String>> l : item.entrySet()) {
         String key = l.getKey();
         ArrayList<String> list = l.getValue();
         Assert.assertTrue(!key.equals("b"));
@@ -84,8 +84,7 @@ public class FilteredLineToTokenArrayListTest
           Assert.assertEquals("number emitted values for \"a\"", 2, list.size());
           Assert.assertEquals("first value for \"a\"", "2", list.get(0));
           Assert.assertEquals("second value for \"a\"", "3", list.get(1));
-        }
-        else if (key.equals("c")) {
+        } else if (key.equals("c")) {
           Assert.assertEquals("number emitted values for \"c\"", 3, list.size());
           Assert.assertEquals("first value for \"c\"", "4", list.get(0));
           Assert.assertEquals("second value for \"c\"", "5", list.get(1));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
index 636eaae..532aef5 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
@@ -34,58 +34,58 @@ import com.datatorrent.lib.testbench.HashTestSink;
  */
 public class FilteredLineToTokenHashMapTest
 {
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeProcessing()
-	{
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeProcessing()
+  {
 
-		FilteredLineToTokenHashMap oper = new FilteredLineToTokenHashMap();
-		HashTestSink tokenSink = new HashTestSink();
+    FilteredLineToTokenHashMap oper = new FilteredLineToTokenHashMap();
+    HashTestSink tokenSink = new HashTestSink();
 
-		oper.setSplitBy(";");
-		oper.setSplitTokenBy(",");
-		oper.tokens.setSink(tokenSink);
-		String[] filters = new String[2];
-		filters[0] = "a";
-		filters[1] = "c";
-		oper.setFilterBy(filters);
+    oper.setSplitBy(";");
+    oper.setSplitTokenBy(",");
+    oper.tokens.setSink(tokenSink);
+    String[] filters = new String[2];
+    filters[0] = "a";
+    filters[1] = "c";
+    oper.setFilterBy(filters);
 
-		oper.beginWindow(0); //
-		String input1 = "a,2,3;b,1,2;c,4,5,6";
-		String input2 = "d";
-		String input3 = "";
-		int numTuples = 1000;
-		for (int i = 0; i < numTuples; i++) {
-			oper.data.process(input1);
-			oper.data.process(input2);
-			oper.data.process(input3);
-		}
-		oper.endWindow(); //
-		Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
-		HashMap<HashMap<String, ArrayList<String>>, Object> smap = tokenSink.map;
-		for (Map.Entry<HashMap<String, ArrayList<String>>, Object> e : smap
-				.entrySet()) {
-			for (Map.Entry<String, ArrayList<String>> l : e.getKey().entrySet()) {
-				String key = l.getKey();
-				ArrayList<String> list = l.getValue();
-				Assert.assertTrue(!key.equals("b"));
-				Assert.assertTrue(!key.equals("d"));
-				if (key.equals("a")) {
-					Assert
-							.assertEquals("number emitted values for \"a\"", 2, list.size());
-					Assert.assertEquals("first value for \"a\"", "2", list.get(0));
-					Assert.assertEquals("second value for \"a\"", "3", list.get(1));
-				} else if (key.equals("c")) {
-					Assert
-							.assertEquals("number emitted values for \"c\"", 3, list.size());
-					Assert.assertEquals("first value for \"c\"", "4", list.get(0));
-					Assert.assertEquals("second value for \"c\"", "5", list.get(1));
-					Assert.assertEquals("second value for \"c\"", "6", list.get(2));
-				}
-			}
-		}
-	}
+    oper.beginWindow(0); //
+    String input1 = "a,2,3;b,1,2;c,4,5,6";
+    String input2 = "d";
+    String input3 = "";
+    int numTuples = 1000;
+    for (int i = 0; i < numTuples; i++) {
+      oper.data.process(input1);
+      oper.data.process(input2);
+      oper.data.process(input3);
+    }
+    oper.endWindow(); //
+    Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
+    HashMap<HashMap<String, ArrayList<String>>, Object> smap = tokenSink.map;
+    for (Map.Entry<HashMap<String, ArrayList<String>>, Object> e : smap
+        .entrySet()) {
+      for (Map.Entry<String, ArrayList<String>> l : e.getKey().entrySet()) {
+        String key = l.getKey();
+        ArrayList<String> list = l.getValue();
+        Assert.assertTrue(!key.equals("b"));
+        Assert.assertTrue(!key.equals("d"));
+        if (key.equals("a")) {
+          Assert
+              .assertEquals("number emitted values for \"a\"", 2, list.size());
+          Assert.assertEquals("first value for \"a\"", "2", list.get(0));
+          Assert.assertEquals("second value for \"a\"", "3", list.get(1));
+        } else if (key.equals("c")) {
+          Assert
+              .assertEquals("number emitted values for \"c\"", 3, list.size());
+          Assert.assertEquals("first value for \"c\"", "4", list.get(0));
+          Assert.assertEquals("second value for \"c\"", "5", list.get(1));
+          Assert.assertEquals("second value for \"c\"", "6", list.get(2));
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyValTest.java b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyValTest.java
index 7b35276..b8e5b7d 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyValTest.java
@@ -37,7 +37,7 @@ public class FilteredLineTokenizerKeyValTest
    * Test oper logic emits correct results
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @Test
   public void testNodeProcessing()
   {
 
@@ -47,7 +47,7 @@ public class FilteredLineTokenizerKeyValTest
     oper.setSplitBy(",");
     oper.setSplitTokenBy("=");
     oper.tokens.setSink(tokenSink);
-    String [] filters = new String[2];
+    String[] filters = new String[2];
     filters[0] = "a";
     filters[1] = "c";
     oper.setFilterBy(filters);
@@ -66,17 +66,16 @@ public class FilteredLineTokenizerKeyValTest
     oper.endWindow(); //
     Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
     HashMap<Object, Object> smap = tokenSink.map;
-    for (Map.Entry<Object, Object> e: smap.entrySet()) {
+    for (Map.Entry<Object, Object> e : smap.entrySet()) {
       HashMap<String, String> kmap = (HashMap<String, String>)e.getKey();
-      for (Map.Entry<String, String> o: kmap.entrySet()) {
+      for (Map.Entry<String, String> o : kmap.entrySet()) {
         String key = o.getKey();
         String val = o.getValue();
         Assert.assertTrue(!key.equals("b"));
         Assert.assertTrue(!key.equals("d"));
         if (key.equals("a")) {
           Assert.assertEquals("value of \"a\"", "2", val);
-        }
-        else if (key.equals("c")) {
+        } else if (key.equals("c")) {
           Assert.assertEquals("value of \"c\"", "4", val);
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
index a4f1a4e..92575b7 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
@@ -36,72 +36,68 @@ import com.datatorrent.lib.testbench.ArrayListTestSink;
  */
 public class LineToTokenArrayListTest
 {
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeProcessing()
-	{
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeProcessing()
+  {
 
-		LineToTokenArrayList oper = new LineToTokenArrayList();
-		ArrayListTestSink tokenSink = new ArrayListTestSink();
-		ArrayListTestSink stokenSink = new ArrayListTestSink();
+    LineToTokenArrayList oper = new LineToTokenArrayList();
+    ArrayListTestSink tokenSink = new ArrayListTestSink();
+    ArrayListTestSink stokenSink = new ArrayListTestSink();
 
-		oper.setSplitBy(";");
-		oper.setSplitTokenBy(",");
-		oper.tokens.setSink(tokenSink);
-		oper.splittokens.setSink(stokenSink);
-		oper.beginWindow(0); //
+    oper.setSplitBy(";");
+    oper.setSplitTokenBy(",");
+    oper.tokens.setSink(tokenSink);
+    oper.splittokens.setSink(stokenSink);
+    oper.beginWindow(0); //
 
-		String input1 = "a,2,3;b,1,2;c,4,5,6";
-		String input2 = "d";
-		String input3 = "";
-		int numTuples = 1000;
-		for (int i = 0; i < numTuples; i++) {
-			oper.data.process(input1);
-			oper.data.process(input2);
-			oper.data.process(input3);
-		}
-		oper.endWindow(); //
-		Assert.assertEquals("number emitted tuples", 4, tokenSink.map.size());
-		Assert.assertEquals("number emitted tuples", 4, stokenSink.map.size());
-		Assert.assertEquals("number emitted tuples", numTuples,
-				tokenSink.getCount("a,2,3"));
-		Assert.assertEquals("number emitted tuples", numTuples,
-				tokenSink.getCount("b,1,2"));
-		Assert.assertEquals("number emitted tuples", numTuples,
-				tokenSink.getCount("c,4,5,6"));
-		Assert.assertEquals("number emitted tuples", numTuples,
-				tokenSink.getCount("d"));
-		HashMap<Object, Object> smap = stokenSink.map;
-		for (Map.Entry<Object, Object> e : smap.entrySet()) {
-			HashMap<String, ArrayList<String>> item = (HashMap<String, ArrayList<String>>) e
-					.getKey();
-			for (Map.Entry<String, ArrayList<String>> l : item.entrySet()) {
-				String key = l.getKey();
-				ArrayList<String> list = l.getValue();
-				if (key.equals("a")) {
-					Assert
-							.assertEquals("number emitted values for \"a\"", 2, list.size());
-					Assert.assertEquals("first value for \"a\"", "2", list.get(0));
-					Assert.assertEquals("second value for \"a\"", "3", list.get(1));
-				} else if (key.equals("b")) {
-					Assert
-							.assertEquals("number emitted values for \"b\"", 2, list.size());
-					Assert.assertEquals("first value for \"b\"", "1", list.get(0));
-					Assert.assertEquals("second value for \"b\"", "2", list.get(1));
-				} else if (key.equals("c")) {
-					Assert
-							.assertEquals("number emitted values for \"c\"", 3, list.size());
-					Assert.assertEquals("first value for \"c\"", "4", list.get(0));
-					Assert.assertEquals("second value for \"c\"", "5", list.get(1));
-					Assert.assertEquals("second value for \"c\"", "6", list.get(2));
-				} else if (key.equals("d")) {
-					Assert
-							.assertEquals("number emitted values for \"d\"", 0, list.size());
-				}
-			}
-		}
-	}
+    String input1 = "a,2,3;b,1,2;c,4,5,6";
+    String input2 = "d";
+    String input3 = "";
+    int numTuples = 1000;
+    for (int i = 0; i < numTuples; i++) {
+      oper.data.process(input1);
+      oper.data.process(input2);
+      oper.data.process(input3);
+    }
+    oper.endWindow(); //
+
+    Assert.assertEquals("number emitted tuples", 4, tokenSink.map.size());
+    Assert.assertEquals("number emitted tuples", 4, stokenSink.map.size());
+    Assert.assertEquals("number emitted tuples", numTuples, tokenSink.getCount("a,2,3"));
+    Assert.assertEquals("number emitted tuples", numTuples, tokenSink.getCount("b,1,2"));
+    Assert.assertEquals("number emitted tuples", numTuples, tokenSink.getCount("c,4,5,6"));
+    Assert.assertEquals("number emitted tuples", numTuples, tokenSink.getCount("d"));
+
+    HashMap<Object, Object> smap = stokenSink.map;
+    for (Map.Entry<Object, Object> e : smap.entrySet()) {
+      HashMap<String, ArrayList<String>> item = (HashMap<String, ArrayList<String>>)e.getKey();
+      for (Map.Entry<String, ArrayList<String>> l : item.entrySet()) {
+        String key = l.getKey();
+        ArrayList<String> list = l.getValue();
+        if (key.equals("a")) {
+          Assert.assertEquals("number emitted values for \"a\"", 2, list.size());
+          Assert.assertEquals("first value for \"a\"", "2", list.get(0));
+          Assert.assertEquals("second value for \"a\"", "3", list.get(1));
+
+        } else if (key.equals("b")) {
+          Assert.assertEquals("number emitted values for \"b\"", 2, list.size());
+          Assert.assertEquals("first value for \"b\"", "1", list.get(0));
+          Assert.assertEquals("second value for \"b\"", "2", list.get(1));
+
+        } else if (key.equals("c")) {
+          Assert.assertEquals("number emitted values for \"c\"", 3, list.size());
+          Assert.assertEquals("first value for \"c\"", "4", list.get(0));
+          Assert.assertEquals("second value for \"c\"", "5", list.get(1));
+          Assert.assertEquals("second value for \"c\"", "6", list.get(2));
+
+        } else if (key.equals("d")) {
+          Assert.assertEquals("number emitted values for \"d\"", 0, list.size());
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
index 93b64e4..20a799e 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
@@ -33,60 +33,60 @@ import com.datatorrent.lib.testbench.HashTestSink;
  */
 public class LineToTokenHashMapTest
 {
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeProcessing()
-	{
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeProcessing()
+  {
 
-		LineToTokenHashMap oper = new LineToTokenHashMap();
-		HashTestSink tokenSink = new HashTestSink();
+    LineToTokenHashMap oper = new LineToTokenHashMap();
+    HashTestSink tokenSink = new HashTestSink();
 
-		oper.setSplitBy(";");
-		oper.setSplitTokenBy(",");
-		oper.tokens.setSink(tokenSink);
+    oper.setSplitBy(";");
+    oper.setSplitTokenBy(",");
+    oper.tokens.setSink(tokenSink);
 
-		oper.beginWindow(0); //
-		String input1 = "a,2,3;b,1,2;c,4,5,6";
-		String input2 = "d";
-		String input3 = "";
-		int numTuples = 1000;
-		for (int i = 0; i < numTuples; i++) {
-			oper.data.process(input1);
-			oper.data.process(input2);
-			oper.data.process(input3);
-		}
-		oper.endWindow(); //
-		Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
-		HashMap<HashMap<String, ArrayList<String>>, Object> smap = tokenSink.map;
-		for (Map.Entry<HashMap<String, ArrayList<String>>, Object> e : smap
-				.entrySet()) {
-			for (Map.Entry<String, ArrayList<String>> l : e.getKey().entrySet()) {
-				String key = l.getKey();
-				ArrayList<String> list = l.getValue();
-				if (key.equals("a")) {
-					Assert
-							.assertEquals("number emitted values for \"a\"", 2, list.size());
-					Assert.assertEquals("first value for \"a\"", "2", list.get(0));
-					Assert.assertEquals("second value for \"a\"", "3", list.get(1));
-				} else if (key.equals("b")) {
-					Assert
-							.assertEquals("number emitted values for \"b\"", 2, list.size());
-					Assert.assertEquals("first value for \"b\"", "1", list.get(0));
-					Assert.assertEquals("second value for \"b\"", "2", list.get(1));
-				} else if (key.equals("c")) {
-					Assert
-							.assertEquals("number emitted values for \"c\"", 3, list.size());
-					Assert.assertEquals("first value for \"c\"", "4", list.get(0));
-					Assert.assertEquals("second value for \"c\"", "5", list.get(1));
-					Assert.assertEquals("second value for \"c\"", "6", list.get(2));
-				} else if (key.equals("d")) {
-					Assert
-							.assertEquals("number emitted values for \"d\"", 0, list.size());
-				}
-			}
-		}
-	}
+    oper.beginWindow(0); //
+    String input1 = "a,2,3;b,1,2;c,4,5,6";
+    String input2 = "d";
+    String input3 = "";
+    int numTuples = 1000;
+    for (int i = 0; i < numTuples; i++) {
+      oper.data.process(input1);
+      oper.data.process(input2);
+      oper.data.process(input3);
+    }
+    oper.endWindow(); //
+    Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
+    HashMap<HashMap<String, ArrayList<String>>, Object> smap = tokenSink.map;
+    for (Map.Entry<HashMap<String, ArrayList<String>>, Object> e : smap
+        .entrySet()) {
+      for (Map.Entry<String, ArrayList<String>> l : e.getKey().entrySet()) {
+        String key = l.getKey();
+        ArrayList<String> list = l.getValue();
+        if (key.equals("a")) {
+          Assert
+              .assertEquals("number emitted values for \"a\"", 2, list.size());
+          Assert.assertEquals("first value for \"a\"", "2", list.get(0));
+          Assert.assertEquals("second value for \"a\"", "3", list.get(1));
+        } else if (key.equals("b")) {
+          Assert
+              .assertEquals("number emitted values for \"b\"", 2, list.size());
+          Assert.assertEquals("first value for \"b\"", "1", list.get(0));
+          Assert.assertEquals("second value for \"b\"", "2", list.get(1));
+        } else if (key.equals("c")) {
+          Assert
+              .assertEquals("number emitted values for \"c\"", 3, list.size());
+          Assert.assertEquals("first value for \"c\"", "4", list.get(0));
+          Assert.assertEquals("second value for \"c\"", "5", list.get(1));
+          Assert.assertEquals("second value for \"c\"", "6", list.get(2));
+        } else if (key.equals("d")) {
+          Assert
+              .assertEquals("number emitted values for \"d\"", 0, list.size());
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java b/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
index cdb176a..7b6613b 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
@@ -32,50 +32,50 @@ import com.datatorrent.lib.testbench.HashTestSink;
  */
 public class LineTokenizerKeyValTest
 {
-	/**
-	 * Test oper logic emits correct results
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	@Test
-	public void testNodeProcessing()
-	{
+  /**
+   * Test oper logic emits correct results
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Test
+  public void testNodeProcessing()
+  {
 
-		LineTokenizerKeyVal oper = new LineTokenizerKeyVal();
-		HashTestSink tokenSink = new HashTestSink();
+    LineTokenizerKeyVal oper = new LineTokenizerKeyVal();
+    HashTestSink tokenSink = new HashTestSink();
 
-		oper.setSplitBy(",");
-		oper.setSplitTokenBy("=");
-		oper.tokens.setSink(tokenSink);
+    oper.setSplitBy(",");
+    oper.setSplitTokenBy("=");
+    oper.tokens.setSink(tokenSink);
 
-		oper.beginWindow(0); //
+    oper.beginWindow(0); //
 
-		String input1 = "a=2,b=3,c=4";
-		String input2 = "d=2";
-		String input3 = "";
-		int numTuples = 1000;
-		for (int i = 0; i < numTuples; i++) {
-			oper.data.process(input1);
-			oper.data.process(input2);
-			oper.data.process(input3);
-		}
-		oper.endWindow(); //
-		Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
-		HashMap<Object, Object> smap = tokenSink.map;
-		for (Map.Entry<Object, Object> e : smap.entrySet()) {
-			HashMap<String, String> kmap = (HashMap<String, String>) e.getKey();
-			for (Map.Entry<String, String> o : kmap.entrySet()) {
-				String key = o.getKey();
-				String val = o.getValue();
-				if (key.equals("a")) {
-					Assert.assertEquals("value of \"a\"", "2", val);
-				} else if (key.equals("b")) {
-					Assert.assertEquals("value of \"b\"", "3", val);
-				} else if (key.equals("c")) {
-					Assert.assertEquals("value of \"c\"", "4", val);
-				} else if (key.equals("d")) {
-					Assert.assertEquals("value of \"d\"", "2", val);
-				}
-			}
-		}
-	}
+    String input1 = "a=2,b=3,c=4";
+    String input2 = "d=2";
+    String input3 = "";
+    int numTuples = 1000;
+    for (int i = 0; i < numTuples; i++) {
+      oper.data.process(input1);
+      oper.data.process(input2);
+      oper.data.process(input3);
+    }
+    oper.endWindow(); //
+    Assert.assertEquals("number emitted tuples", 2, tokenSink.map.size());
+    HashMap<Object, Object> smap = tokenSink.map;
+    for (Map.Entry<Object, Object> e : smap.entrySet()) {
+      HashMap<String, String> kmap = (HashMap<String, String>)e.getKey();
+      for (Map.Entry<String, String> o : kmap.entrySet()) {
+        String key = o.getKey();
+        String val = o.getValue();
+        if (key.equals("a")) {
+          Assert.assertEquals("value of \"a\"", "2", val);
+        } else if (key.equals("b")) {
+          Assert.assertEquals("value of \"b\"", "3", val);
+        } else if (key.equals("c")) {
+          Assert.assertEquals("value of \"c\"", "4", val);
+        } else if (key.equals("d")) {
+          Assert.assertEquals("value of \"d\"", "2", val);
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java b/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
index 83054aa..94845a7 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
@@ -23,12 +23,13 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang.mutable.MutableDouble;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.mutable.MutableDouble;
+
 import com.datatorrent.lib.logs.MultiWindowDimensionAggregation.AggregateOperation;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
@@ -59,8 +60,8 @@ public class MultiWindowDimensionAggregationTest
 
     oper.setWindowSize(3);
     List<int[]> dimensionArrayList = new ArrayList<int[]>();
-    int[] dimensionArray = { 0, 1 };
-    int[] dimensionArray_2 = { 0 };
+    int[] dimensionArray = {0, 1};
+    int[] dimensionArray_2 = {0};
     dimensionArrayList.add(dimensionArray);
     dimensionArrayList.add(dimensionArray_2);
     oper.setDimensionArray(dimensionArrayList);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/RegexMatchMapOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/RegexMatchMapOperatorTest.java b/library/src/test/java/com/datatorrent/lib/logs/RegexMatchMapOperatorTest.java
index 724935f..76a22d5 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/RegexMatchMapOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/RegexMatchMapOperatorTest.java
@@ -39,7 +39,7 @@ public class RegexMatchMapOperatorTest
   /**
    * Since the regex does not have a default value, ensure the operator raises a RuntimeException if it is not set.
    */
-  @Test(expected=RuntimeException.class)
+  @Test(expected = RuntimeException.class)
   public void testRaisesIfNoRegex()
   {
     String string = "foobar";
@@ -93,24 +93,31 @@ public class RegexMatchMapOperatorTest
   @Test
   public void testMatching()
   {
-    ArrayList<HashMap<String,String>> test_cases = new ArrayList<HashMap<String,String>>();
+    ArrayList<HashMap<String, String>> test_cases = new ArrayList<HashMap<String, String>>();
 
     // class comment example case
-    HashMap<String,String> test_case = new HashMap<String,String>();
+    HashMap<String, String> test_case = new HashMap<String, String>();
     test_case.put("string", "12345 \"foo bar\" baz;goober");
     test_case.put("regex", "^(?<id>\\d+) \"(?<username>[^\"]+)\" (?<action>[^;]+);(?<cookie>.+)");
     test_case.put("fields", "id,username,action,cookie");
     test_cases.add(test_case);
 
     // apache log case
-    test_case = new HashMap<String,String>();
-    test_case.put("string", "127.0.0.1 - - [04/Apr/2013:17:17:21 -0700] \"GET /favicon.ico HTTP/1.1\" 404 498 \"http://www.google.com/\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537.31\"");
-    test_case.put("regex", "^(?<ip>[\\d\\.]+) (\\S+) (\\S+) \\[(?<date>[\\w:/]+\\s[+\\-]\\d{4})\\] \"[A-Z]+ (?<url>.+?) HTTP/\\S+\" (?<status>\\d{3}) (?<bytes>\\d+) \"(?<referer>[^\"]+)\" \"(?<agent>[^\"]+)\"");
+    test_case = new HashMap<String, String>();
+    test_case.put("string",
+        "127.0.0.1 - - [04/Apr/2013:17:17:21 -0700] \"GET /favicon.ico HTTP/1.1\" 404 498 \"http://www.google.com/\" " +
+        "\"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.43 Safari/537" +
+        ".31\"");
+
+    test_case.put("regex",
+        "^(?<ip>[\\d\\.]+) (\\S+) (\\S+) \\[(?<date>[\\w:/]+\\s[+\\-]\\d{4})\\] \"[A-Z]+ (?<url>.+?) HTTP/\\S+\" " +
+        "(?<status>\\d{3}) (?<bytes>\\d+) \"(?<referer>[^\"]+)\" \"(?<agent>[^\"]+)\"");
+
     test_case.put("fields", "ip,date,url,status,bytes,referer,agent");
     test_cases.add(test_case);
 
     // iterate through test cases
-    for (HashMap<String,String> curr_case : test_cases) {
+    for (HashMap<String, String> curr_case : test_cases) {
       String string = curr_case.get("string");
       String regex = curr_case.get("regex");
       String[] fields = curr_case.get("fields").split(",");
@@ -133,7 +140,7 @@ public class RegexMatchMapOperatorTest
 
       // fetch the Map that was output
       @SuppressWarnings("unchecked")
-      Map<String,Object> output = (Map<String,Object>)sink.collectedTuples.get(0);
+      Map<String, Object> output = (Map<String, Object>)sink.collectedTuples.get(0);
 
       // debugging output
       log.debug(String.format("Line is  : %s", string));
@@ -146,4 +153,4 @@ public class RegexMatchMapOperatorTest
       }
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/logs/TopNUniqueSiteStatsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/TopNUniqueSiteStatsTest.java b/library/src/test/java/com/datatorrent/lib/logs/TopNUniqueSiteStatsTest.java
index 83f2976..4faf32f 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/TopNUniqueSiteStatsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/TopNUniqueSiteStatsTest.java
@@ -21,14 +21,13 @@ package com.datatorrent.lib.logs;
 import java.util.HashMap;
 
 import org.junit.Assert;
-
-import org.apache.commons.lang.mutable.MutableDouble;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.mutable.MutableDouble;
+
 import com.datatorrent.lib.algo.TopNUnique;
-import com.datatorrent.lib.logs.DimensionObject;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
 /**
@@ -36,55 +35,58 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  * This tests the integration between MultiWindowDimensionAggregationOperator and TopNUnique Operator
  *
  */
-public class TopNUniqueSiteStatsTest {
+public class TopNUniqueSiteStatsTest
+{
   private static Logger log = LoggerFactory.getLogger(TopNUniqueSiteStatsTest.class);
 
   /**
    * Test node logic emits correct results
    */
   @Test
-    public void testNodeProcessing() throws Exception {
-      testNodeProcessingSchema(new TopNUnique<String, DimensionObject<String>>());
-
-    }
-
-  @SuppressWarnings({ "rawtypes", "unchecked" })
-    public void testNodeProcessingSchema(TopNUnique oper) {
-      CollectorTestSink sortSink = new CollectorTestSink();
-      oper.top.setSink(sortSink);
-      oper.setN(3);
-
-      oper.beginWindow(0);
-      HashMap<String, DimensionObject<String>> input = new HashMap<String, DimensionObject<String>>();
-
-      input.put("url", new DimensionObject<String>(new MutableDouble(10), "abc"));
-      oper.data.process(input);
-
-      input.clear();
-      input.put("url", new DimensionObject<String>(new MutableDouble(1), "def"));
-      input.put("url1", new DimensionObject<String>(new MutableDouble(1), "def"));
-      oper.data.process(input);
-
-      input.clear();
-      input.put("url", new DimensionObject<String>(new MutableDouble(101), "ghi"));
-      input.put("url1", new DimensionObject<String>(new MutableDouble(101), "ghi"));
-      oper.data.process(input);
-
-      input.clear();
-      input.put("url", new DimensionObject<String>(new MutableDouble(50), "jkl"));
-      oper.data.process(input);
-
-      input.clear();
-      input.put("url", new DimensionObject<String>(new MutableDouble(50), "jkl"));
-      input.put("url3", new DimensionObject<String>(new MutableDouble(50), "jkl"));
-      oper.data.process(input);
-      oper.endWindow();
-
-      Assert.assertEquals("number emitted tuples", 3,	sortSink.collectedTuples.size());
-      for (Object o : sortSink.collectedTuples) {
-        log.debug(o.toString());
-      }
-      log.debug("Done testing round\n");
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new TopNUnique<String, DimensionObject<String>>());
+
+  }
+
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public void testNodeProcessingSchema(TopNUnique oper)
+  {
+    CollectorTestSink sortSink = new CollectorTestSink();
+    oper.top.setSink(sortSink);
+    oper.setN(3);
+
+    oper.beginWindow(0);
+    HashMap<String, DimensionObject<String>> input = new HashMap<String, DimensionObject<String>>();
+
+    input.put("url", new DimensionObject<String>(new MutableDouble(10), "abc"));
+    oper.data.process(input);
+
+    input.clear();
+    input.put("url", new DimensionObject<String>(new MutableDouble(1), "def"));
+    input.put("url1", new DimensionObject<String>(new MutableDouble(1), "def"));
+    oper.data.process(input);
+
+    input.clear();
+    input.put("url", new DimensionObject<String>(new MutableDouble(101), "ghi"));
+    input.put("url1", new DimensionObject<String>(new MutableDouble(101), "ghi"));
+    oper.data.process(input);
+
+    input.clear();
+    input.put("url", new DimensionObject<String>(new MutableDouble(50), "jkl"));
+    oper.data.process(input);
+
+    input.clear();
+    input.put("url", new DimensionObject<String>(new MutableDouble(50), "jkl"));
+    input.put("url3", new DimensionObject<String>(new MutableDouble(50), "jkl"));
+    oper.data.process(input);
+    oper.endWindow();
+
+    Assert.assertEquals("number emitted tuples", 3, sortSink.collectedTuples.size());
+    for (Object o : sortSink.collectedTuples) {
+      log.debug(o.toString());
     }
+    log.debug("Done testing round\n");
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/AverageKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/AverageKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/AverageKeyValTest.java
index 5c2634d..0f81106 100644
--- a/library/src/test/java/com/datatorrent/lib/math/AverageKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/AverageKeyValTest.java
@@ -32,58 +32,53 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class AverageKeyValTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	@Test
-	public void testNodeProcessing()
-	{
-		AverageKeyVal<String> oper = new AverageKeyVal<String>();
-		CollectorTestSink averageSink = new CollectorTestSink();
+  /**
+   * Test operator logic emits correct results.
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Test
+  public void testNodeProcessing()
+  {
+    AverageKeyVal<String> oper = new AverageKeyVal<String>();
+    CollectorTestSink averageSink = new CollectorTestSink();
 
-		oper.doubleAverage.setSink(averageSink);
+    oper.doubleAverage.setSink(averageSink);
 
-		oper.beginWindow(0); //
+    oper.beginWindow(0); //
 
-		oper.data.process(new KeyValPair("a", 2.0));
-		oper.data.process(new KeyValPair("b", 20.0));
-		oper.data.process(new KeyValPair("c", 1000.0));
-		oper.data.process(new KeyValPair("a", 1.0));
-		oper.data.process(new KeyValPair("a", 10.0));
-		oper.data.process(new KeyValPair("b", 5.0));
-		oper.data.process(new KeyValPair("d", 55.0));
-		oper.data.process(new KeyValPair("b", 12.0));
-		oper.data.process(new KeyValPair("d", 22.0));
-		oper.data.process(new KeyValPair("d", 14.2));
-		oper.data.process(new KeyValPair("d", 46.0));
-		oper.data.process(new KeyValPair("e", 2.0));
-		oper.data.process(new KeyValPair("a", 23.0));
-		oper.data.process(new KeyValPair("d", 4.0));
+    oper.data.process(new KeyValPair("a", 2.0));
+    oper.data.process(new KeyValPair("b", 20.0));
+    oper.data.process(new KeyValPair("c", 1000.0));
+    oper.data.process(new KeyValPair("a", 1.0));
+    oper.data.process(new KeyValPair("a", 10.0));
+    oper.data.process(new KeyValPair("b", 5.0));
+    oper.data.process(new KeyValPair("d", 55.0));
+    oper.data.process(new KeyValPair("b", 12.0));
+    oper.data.process(new KeyValPair("d", 22.0));
+    oper.data.process(new KeyValPair("d", 14.2));
+    oper.data.process(new KeyValPair("d", 46.0));
+    oper.data.process(new KeyValPair("e", 2.0));
+    oper.data.process(new KeyValPair("a", 23.0));
+    oper.data.process(new KeyValPair("d", 4.0));
 
-		oper.endWindow(); //
+    oper.endWindow(); //
 
-		Assert.assertEquals("number emitted tuples", 5,
-				averageSink.collectedTuples.size());
-		for (Object o : averageSink.collectedTuples) {
-			KeyValPair<String, Double> e = (KeyValPair<String, Double>) o;
-			Double val = e.getValue();
-			if (e.getKey().equals("a")) {
-				Assert.assertEquals("emitted value for 'a' was ", new Double(36 / 4.0),
-						val);
-			} else if (e.getKey().equals("b")) {
-				Assert.assertEquals("emitted tuple for 'b' was ", new Double(37 / 3.0),
-						val);
-			} else if (e.getKey().equals("c")) {
-				Assert.assertEquals("emitted tuple for 'c' was ",
-						new Double(1000 / 1.0), val);
-			} else if (e.getKey().equals("d")) {
-				Assert.assertEquals("emitted tuple for 'd' was ",
-						new Double(141.2 / 5), val);
-			} else if (e.getKey().equals("e")) {
-				Assert.assertEquals("emitted tuple for 'e' was ", new Double(2 / 1.0),
-						val);
-			}
-		}
-	}
+    Assert.assertEquals("number emitted tuples", 5,
+        averageSink.collectedTuples.size());
+    for (Object o : averageSink.collectedTuples) {
+      KeyValPair<String, Double> e = (KeyValPair<String, Double>)o;
+      Double val = e.getValue();
+      if (e.getKey().equals("a")) {
+        Assert.assertEquals("emitted value for 'a' was ", new Double(36 / 4.0), val);
+      } else if (e.getKey().equals("b")) {
+        Assert.assertEquals("emitted tuple for 'b' was ", new Double(37 / 3.0), val);
+      } else if (e.getKey().equals("c")) {
+        Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000 / 1.0), val);
+      } else if (e.getKey().equals("d")) {
+        Assert.assertEquals("emitted tuple for 'd' was ", new Double(141.2 / 5), val);
+      } else if (e.getKey().equals("e")) {
+        Assert.assertEquals("emitted tuple for 'e' was ", new Double(2 / 1.0), val);
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/AverageTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/AverageTest.java b/library/src/test/java/com/datatorrent/lib/math/AverageTest.java
index 166e929..1973bec 100644
--- a/library/src/test/java/com/datatorrent/lib/math/AverageTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/AverageTest.java
@@ -31,75 +31,73 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class AverageTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing()
-	{
-		Average<Double> doper = new Average<Double>();
-		Average<Float> foper = new Average<Float>();
-		Average<Integer> ioper = new Average<Integer>();
-		Average<Long> loper = new Average<Long>();
-		Average<Short> soper = new Average<Short>();
-		doper.setType(Double.class);
-		foper.setType(Float.class);
-		ioper.setType(Integer.class);
-		loper.setType(Long.class);
-		soper.setType(Short.class);
+  /**
+   * Test operator logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing()
+  {
+    Average<Double> doper = new Average<Double>();
+    Average<Float> foper = new Average<Float>();
+    Average<Integer> ioper = new Average<Integer>();
+    Average<Long> loper = new Average<Long>();
+    Average<Short> soper = new Average<Short>();
+    doper.setType(Double.class);
+    foper.setType(Float.class);
+    ioper.setType(Integer.class);
+    loper.setType(Long.class);
+    soper.setType(Short.class);
 
-		testNodeSchemaProcessing(doper);
-		testNodeSchemaProcessing(foper);
-		testNodeSchemaProcessing(ioper);
-		testNodeSchemaProcessing(loper);
-		testNodeSchemaProcessing(soper);
-	}
+    testNodeSchemaProcessing(doper);
+    testNodeSchemaProcessing(foper);
+    testNodeSchemaProcessing(ioper);
+    testNodeSchemaProcessing(loper);
+    testNodeSchemaProcessing(soper);
+  }
 
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void testNodeSchemaProcessing(Average oper)
-	{
-		CollectorTestSink averageSink = new CollectorTestSink();
-		oper.average.setSink(averageSink);
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public void testNodeSchemaProcessing(Average oper)
+  {
+    CollectorTestSink averageSink = new CollectorTestSink();
+    oper.average.setSink(averageSink);
 
-		oper.beginWindow(0); //
+    oper.beginWindow(0); //
 
-		Double a = new Double(2.0);
-		Double b = new Double(20.0);
-		Double c = new Double(1000.0);
+    Double a = new Double(2.0);
+    Double b = new Double(20.0);
+    Double c = new Double(1000.0);
 
-		oper.data.process(a);
-		oper.data.process(b);
-		oper.data.process(c);
+    oper.data.process(a);
+    oper.data.process(b);
+    oper.data.process(c);
 
-		a = 1.0;
-		oper.data.process(a);
-		a = 10.0;
-		oper.data.process(a);
-		b = 5.0;
-		oper.data.process(b);
+    a = 1.0;
+    oper.data.process(a);
+    a = 10.0;
+    oper.data.process(a);
+    b = 5.0;
+    oper.data.process(b);
 
-		b = 12.0;
-		oper.data.process(b);
-		c = 22.0;
-		oper.data.process(c);
-		c = 14.0;
-		oper.data.process(c);
+    b = 12.0;
+    oper.data.process(b);
+    c = 22.0;
+    oper.data.process(c);
+    c = 14.0;
+    oper.data.process(c);
 
-		a = 46.0;
-		oper.data.process(a);
-		b = 2.0;
-		oper.data.process(b);
-		a = 23.0;
-		oper.data.process(a);
+    a = 46.0;
+    oper.data.process(a);
+    b = 2.0;
+    oper.data.process(b);
+    a = 23.0;
+    oper.data.process(a);
 
-		oper.endWindow(); //
+    oper.endWindow(); //
 
-		Assert.assertEquals("number emitted tuples", 1,
-				averageSink.collectedTuples.size());
-		for (Object o : averageSink.collectedTuples) { // count is 12
-			Integer val = ((Number) o).intValue();
-			Assert.assertEquals("emitted average value was was ", new Integer(
-					1157 / 12), val);
-		}
-	}
+    Assert.assertEquals("number emitted tuples", 1, averageSink.collectedTuples.size());
+    for (Object o : averageSink.collectedTuples) { // count is 12
+      Integer val = ((Number)o).intValue();
+      Assert.assertEquals("emitted average value was was ", new Integer(1157 / 12), val);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/ChangeAlertKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/ChangeAlertKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/ChangeAlertKeyValTest.java
index 05f2b73..7d7842e 100644
--- a/library/src/test/java/com/datatorrent/lib/math/ChangeAlertKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/ChangeAlertKeyValTest.java
@@ -35,73 +35,73 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class ChangeAlertKeyValTest
 {
-	private static Logger log = LoggerFactory
-			.getLogger(ChangeAlertKeyValTest.class);
+  private static Logger log = LoggerFactory
+      .getLogger(ChangeAlertKeyValTest.class);
 
-	/**
-	 * Test node logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new ChangeAlertKeyVal<String, Integer>());
-		testNodeProcessingSchema(new ChangeAlertKeyVal<String, Double>());
-		testNodeProcessingSchema(new ChangeAlertKeyVal<String, Float>());
-		testNodeProcessingSchema(new ChangeAlertKeyVal<String, Short>());
-		testNodeProcessingSchema(new ChangeAlertKeyVal<String, Long>());
-	}
+  /**
+   * Test node logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new ChangeAlertKeyVal<String, Integer>());
+    testNodeProcessingSchema(new ChangeAlertKeyVal<String, Double>());
+    testNodeProcessingSchema(new ChangeAlertKeyVal<String, Float>());
+    testNodeProcessingSchema(new ChangeAlertKeyVal<String, Short>());
+    testNodeProcessingSchema(new ChangeAlertKeyVal<String, Long>());
+  }
 
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   public <V extends Number> void testNodeProcessingSchema(
-			ChangeAlertKeyVal<String, V> oper)
-	{
-		CollectorTestSink alertSink = new CollectorTestSink();
+      ChangeAlertKeyVal<String, V> oper)
+  {
+    CollectorTestSink alertSink = new CollectorTestSink();
 
-		oper.alert.setSink(alertSink);
-		oper.setPercentThreshold(5);
+    oper.alert.setSink(alertSink);
+    oper.setPercentThreshold(5);
 
-		oper.beginWindow(0);
-		oper.data.process(new KeyValPair<String, V>("a", oper.getValue(200)));
-		oper.data.process(new KeyValPair<String, V>("b", oper.getValue(10)));
-		oper.data.process(new KeyValPair<String, V>("c", oper.getValue(100)));
+    oper.beginWindow(0);
+    oper.data.process(new KeyValPair<String, V>("a", oper.getValue(200)));
+    oper.data.process(new KeyValPair<String, V>("b", oper.getValue(10)));
+    oper.data.process(new KeyValPair<String, V>("c", oper.getValue(100)));
 
-		oper.data.process(new KeyValPair<String, V>("a", oper.getValue(203)));
-		oper.data.process(new KeyValPair<String, V>("b", oper.getValue(12)));
-		oper.data.process(new KeyValPair<String, V>("c", oper.getValue(101)));
+    oper.data.process(new KeyValPair<String, V>("a", oper.getValue(203)));
+    oper.data.process(new KeyValPair<String, V>("b", oper.getValue(12)));
+    oper.data.process(new KeyValPair<String, V>("c", oper.getValue(101)));
 
-		oper.data.process(new KeyValPair<String, V>("a", oper.getValue(210)));
-		oper.data.process(new KeyValPair<String, V>("b", oper.getValue(12)));
-		oper.data.process(new KeyValPair<String, V>("c", oper.getValue(102)));
+    oper.data.process(new KeyValPair<String, V>("a", oper.getValue(210)));
+    oper.data.process(new KeyValPair<String, V>("b", oper.getValue(12)));
+    oper.data.process(new KeyValPair<String, V>("c", oper.getValue(102)));
 
-		oper.data.process(new KeyValPair<String, V>("a", oper.getValue(231)));
-		oper.data.process(new KeyValPair<String, V>("b", oper.getValue(18)));
-		oper.data.process(new KeyValPair<String, V>("c", oper.getValue(103)));
-		oper.endWindow();
+    oper.data.process(new KeyValPair<String, V>("a", oper.getValue(231)));
+    oper.data.process(new KeyValPair<String, V>("b", oper.getValue(18)));
+    oper.data.process(new KeyValPair<String, V>("c", oper.getValue(103)));
+    oper.endWindow();
 
-		// One for a, Two for b
-		Assert.assertEquals("number emitted tuples", 3,
-				alertSink.collectedTuples.size());
+    // One for a, Two for b
+    Assert.assertEquals("number emitted tuples", 3,
+        alertSink.collectedTuples.size());
 
-		double aval = 0;
-		double bval = 0;
-		log.debug("\nLogging tuples");
-		for (Object o : alertSink.collectedTuples) {
-			KeyValPair<String, KeyValPair<Number, Double>> map = (KeyValPair<String, KeyValPair<Number, Double>>) o;
+    double aval = 0;
+    double bval = 0;
+    log.debug("\nLogging tuples");
+    for (Object o : alertSink.collectedTuples) {
+      KeyValPair<String, KeyValPair<Number, Double>> map = (KeyValPair<String, KeyValPair<Number, Double>>)o;
 
-			log.debug(o.toString());
-			if (map.getKey().equals("a")) {
-				KeyValPair<Number, Double> vmap = map.getValue();
-				if (vmap != null) {
-					aval += vmap.getValue().doubleValue();
-				}
-			} else {
-				KeyValPair<Number, Double> vmap = map.getValue();
-				if (vmap != null) {
-					bval += vmap.getValue().doubleValue();
-				}
-			}
-		}
-		Assert.assertEquals("change in a", 10.0, aval,0);
-		Assert.assertEquals("change in a", 70.0, bval,0);
-	}
+      log.debug(o.toString());
+      if (map.getKey().equals("a")) {
+        KeyValPair<Number, Double> vmap = map.getValue();
+        if (vmap != null) {
+          aval += vmap.getValue().doubleValue();
+        }
+      } else {
+        KeyValPair<Number, Double> vmap = map.getValue();
+        if (vmap != null) {
+          bval += vmap.getValue().doubleValue();
+        }
+      }
+    }
+    Assert.assertEquals("change in a", 10.0, aval,0);
+    Assert.assertEquals("change in a", 70.0, bval,0);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/ChangeAlertMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/ChangeAlertMapTest.java b/library/src/test/java/com/datatorrent/lib/math/ChangeAlertMapTest.java
index 95eae77..51f52f0 100644
--- a/library/src/test/java/com/datatorrent/lib/math/ChangeAlertMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/ChangeAlertMapTest.java
@@ -35,81 +35,81 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class ChangeAlertMapTest
 {
-	private static Logger log = LoggerFactory.getLogger(ChangeAlertMapTest.class);
+  private static Logger log = LoggerFactory.getLogger(ChangeAlertMapTest.class);
 
-	/**
-	 * Test node logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new ChangeAlertMap<String, Integer>());
-		testNodeProcessingSchema(new ChangeAlertMap<String, Double>());
-		testNodeProcessingSchema(new ChangeAlertMap<String, Float>());
-		testNodeProcessingSchema(new ChangeAlertMap<String, Short>());
-		testNodeProcessingSchema(new ChangeAlertMap<String, Long>());
-	}
+  /**
+   * Test node logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new ChangeAlertMap<String, Integer>());
+    testNodeProcessingSchema(new ChangeAlertMap<String, Double>());
+    testNodeProcessingSchema(new ChangeAlertMap<String, Float>());
+    testNodeProcessingSchema(new ChangeAlertMap<String, Short>());
+    testNodeProcessingSchema(new ChangeAlertMap<String, Long>());
+  }
 
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   public <V extends Number> void testNodeProcessingSchema(
-			ChangeAlertMap<String, V> oper)
-	{
-		CollectorTestSink alertSink = new CollectorTestSink();
+      ChangeAlertMap<String, V> oper)
+  {
+    CollectorTestSink alertSink = new CollectorTestSink();
 
-		oper.alert.setSink(alertSink);
-		oper.setPercentThreshold(5);
+    oper.alert.setSink(alertSink);
+    oper.setPercentThreshold(5);
 
-		oper.beginWindow(0);
-		HashMap<String, V> input = new HashMap<String, V>();
-		input.put("a", oper.getValue(200));
-		input.put("b", oper.getValue(10));
-		input.put("c", oper.getValue(100));
-		oper.data.process(input);
+    oper.beginWindow(0);
+    HashMap<String, V> input = new HashMap<String, V>();
+    input.put("a", oper.getValue(200));
+    input.put("b", oper.getValue(10));
+    input.put("c", oper.getValue(100));
+    oper.data.process(input);
 
-		input.clear();
-		input.put("a", oper.getValue(203));
-		input.put("b", oper.getValue(12));
-		input.put("c", oper.getValue(101));
-		oper.data.process(input);
+    input.clear();
+    input.put("a", oper.getValue(203));
+    input.put("b", oper.getValue(12));
+    input.put("c", oper.getValue(101));
+    oper.data.process(input);
 
-		input.clear();
-		input.put("a", oper.getValue(210));
-		input.put("b", oper.getValue(12));
-		input.put("c", oper.getValue(102));
-		oper.data.process(input);
+    input.clear();
+    input.put("a", oper.getValue(210));
+    input.put("b", oper.getValue(12));
+    input.put("c", oper.getValue(102));
+    oper.data.process(input);
 
-		input.clear();
-		input.put("a", oper.getValue(231));
-		input.put("b", oper.getValue(18));
-		input.put("c", oper.getValue(103));
-		oper.data.process(input);
-		oper.endWindow();
+    input.clear();
+    input.put("a", oper.getValue(231));
+    input.put("b", oper.getValue(18));
+    input.put("c", oper.getValue(103));
+    oper.data.process(input);
+    oper.endWindow();
 
-		// One for a, Two for b
-		Assert.assertEquals("number emitted tuples", 3,
-				alertSink.collectedTuples.size());
+    // One for a, Two for b
+    Assert.assertEquals("number emitted tuples", 3,
+        alertSink.collectedTuples.size());
 
-		double aval = 0;
-		double bval = 0;
-		log.debug("\nLogging tuples");
-		for (Object o : alertSink.collectedTuples) {
-			HashMap<String, HashMap<Number, Double>> map = (HashMap<String, HashMap<Number, Double>>) o;
-			Assert.assertEquals("map size", 1, map.size());
-			log.debug(o.toString());
-			HashMap<Number, Double> vmap = map.get("a");
-			if (vmap != null) {
-				aval += vmap.get(231.0).doubleValue();
-			}
-			vmap = map.get("b");
-			if (vmap != null) {
-				if (vmap.get(12.0) != null) {
-					bval += vmap.get(12.0).doubleValue();
-				} else {
-					bval += vmap.get(18.0).doubleValue();
-				}
-			}
-		}
-		Assert.assertEquals("change in a", 10.0, aval,0);
-		Assert.assertEquals("change in a", 70.0, bval,0);
-	}
+    double aval = 0;
+    double bval = 0;
+    log.debug("\nLogging tuples");
+    for (Object o : alertSink.collectedTuples) {
+      HashMap<String, HashMap<Number, Double>> map = (HashMap<String, HashMap<Number, Double>>)o;
+      Assert.assertEquals("map size", 1, map.size());
+      log.debug(o.toString());
+      HashMap<Number, Double> vmap = map.get("a");
+      if (vmap != null) {
+        aval += vmap.get(231.0).doubleValue();
+      }
+      vmap = map.get("b");
+      if (vmap != null) {
+        if (vmap.get(12.0) != null) {
+          bval += vmap.get(12.0).doubleValue();
+        } else {
+          bval += vmap.get(18.0).doubleValue();
+        }
+      }
+    }
+    Assert.assertEquals("change in a", 10.0, aval,0);
+    Assert.assertEquals("change in a", 70.0, bval,0);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/ChangeKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/ChangeKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/ChangeKeyValTest.java
index 6ebddad..6c2151b 100644
--- a/library/src/test/java/com/datatorrent/lib/math/ChangeKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/ChangeKeyValTest.java
@@ -34,78 +34,78 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class ChangeKeyValTest
 {
-	private static Logger log = LoggerFactory.getLogger(ChangeKeyValTest.class);
+  private static Logger log = LoggerFactory.getLogger(ChangeKeyValTest.class);
 
-	/**
-	 * Test node logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new ChangeKeyVal<String, Integer>());
-		testNodeProcessingSchema(new ChangeKeyVal<String, Double>());
-		testNodeProcessingSchema(new ChangeKeyVal<String, Float>());
-		testNodeProcessingSchema(new ChangeKeyVal<String, Short>());
-		testNodeProcessingSchema(new ChangeKeyVal<String, Long>());
-	}
+  /**
+   * Test node logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new ChangeKeyVal<String, Integer>());
+    testNodeProcessingSchema(new ChangeKeyVal<String, Double>());
+    testNodeProcessingSchema(new ChangeKeyVal<String, Float>());
+    testNodeProcessingSchema(new ChangeKeyVal<String, Short>());
+    testNodeProcessingSchema(new ChangeKeyVal<String, Long>());
+  }
 
-	/**
-	 *
-	 * @param oper
-	 *          key/value pair for comparison.
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   *
+   * @param oper
+   *          key/value pair for comparison.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   public <V extends Number> void testNodeProcessingSchema(
-			ChangeKeyVal<String, V> oper)
-	{
-		CollectorTestSink changeSink = new CollectorTestSink();
-		CollectorTestSink percentSink = new CollectorTestSink();
+      ChangeKeyVal<String, V> oper)
+  {
+    CollectorTestSink changeSink = new CollectorTestSink();
+    CollectorTestSink percentSink = new CollectorTestSink();
 
-		oper.change.setSink(changeSink);
-		oper.percent.setSink(percentSink);
+    oper.change.setSink(changeSink);
+    oper.percent.setSink(percentSink);
 
-		oper.beginWindow(0);
-		oper.base.process(new KeyValPair<String, V>("a", oper.getValue(2)));
-		oper.base.process(new KeyValPair<String, V>("b", oper.getValue(10)));
-		oper.base.process(new KeyValPair<String, V>("c", oper.getValue(100)));
+    oper.beginWindow(0);
+    oper.base.process(new KeyValPair<String, V>("a", oper.getValue(2)));
+    oper.base.process(new KeyValPair<String, V>("b", oper.getValue(10)));
+    oper.base.process(new KeyValPair<String, V>("c", oper.getValue(100)));
 
-		oper.data.process(new KeyValPair<String, V>("a", oper.getValue(3)));
-		oper.data.process(new KeyValPair<String, V>("b", oper.getValue(2)));
-		oper.data.process(new KeyValPair<String, V>("c", oper.getValue(4)));
+    oper.data.process(new KeyValPair<String, V>("a", oper.getValue(3)));
+    oper.data.process(new KeyValPair<String, V>("b", oper.getValue(2)));
+    oper.data.process(new KeyValPair<String, V>("c", oper.getValue(4)));
 
-		oper.endWindow();
+    oper.endWindow();
 
-		// One for each key
-		Assert.assertEquals("number emitted tuples", 3,
-				changeSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 3,
-				percentSink.collectedTuples.size());
+    // One for each key
+    Assert.assertEquals("number emitted tuples", 3,
+        changeSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 3,
+        percentSink.collectedTuples.size());
 
-		log.debug("\nLogging tuples");
-		for (Object o : changeSink.collectedTuples) {
-			KeyValPair<String, Number> kv = (KeyValPair<String, Number>) o;
-			if (kv.getKey().equals("a")) {
-				Assert.assertEquals("change in a ", 1.0, kv.getValue());
-			}
-			if (kv.getKey().equals("b")) {
-				Assert.assertEquals("change in b ", -8.0, kv.getValue());
-			}
-			if (kv.getKey().equals("c")) {
-				Assert.assertEquals("change in c ", -96.0, kv.getValue());
-			}
-		}
+    log.debug("\nLogging tuples");
+    for (Object o : changeSink.collectedTuples) {
+      KeyValPair<String, Number> kv = (KeyValPair<String, Number>)o;
+      if (kv.getKey().equals("a")) {
+        Assert.assertEquals("change in a ", 1.0, kv.getValue());
+      }
+      if (kv.getKey().equals("b")) {
+        Assert.assertEquals("change in b ", -8.0, kv.getValue());
+      }
+      if (kv.getKey().equals("c")) {
+        Assert.assertEquals("change in c ", -96.0, kv.getValue());
+      }
+    }
 
-		for (Object o : percentSink.collectedTuples) {
-			KeyValPair<String, Number> kv = (KeyValPair<String, Number>) o;
-			if (kv.getKey().equals("a")) {
-				Assert.assertEquals("change in a ", 50.0, kv.getValue());
-			}
-			if (kv.getKey().equals("b")) {
-				Assert.assertEquals("change in b ", -80.0, kv.getValue());
-			}
-			if (kv.getKey().equals("c")) {
-				Assert.assertEquals("change in c ", -96.0, kv.getValue());
-			}
-		}
-	}
+    for (Object o : percentSink.collectedTuples) {
+      KeyValPair<String, Number> kv = (KeyValPair<String, Number>)o;
+      if (kv.getKey().equals("a")) {
+        Assert.assertEquals("change in a ", 50.0, kv.getValue());
+      }
+      if (kv.getKey().equals("b")) {
+        Assert.assertEquals("change in b ", -80.0, kv.getValue());
+      }
+      if (kv.getKey().equals("c")) {
+        Assert.assertEquals("change in c ", -96.0, kv.getValue());
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/ChangeTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/ChangeTest.java b/library/src/test/java/com/datatorrent/lib/math/ChangeTest.java
index b7607da..9595a5d 100644
--- a/library/src/test/java/com/datatorrent/lib/math/ChangeTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/ChangeTest.java
@@ -33,52 +33,52 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class ChangeTest
 {
-	private static Logger log = LoggerFactory.getLogger(ChangeTest.class);
+  private static Logger log = LoggerFactory.getLogger(ChangeTest.class);
 
-	/**
-	 * Test node logic emits correct results.
-	 */
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		testNodeProcessingSchema(new Change<Integer>());
-		testNodeProcessingSchema(new Change<Double>());
-		testNodeProcessingSchema(new Change<Float>());
-		testNodeProcessingSchema(new Change<Short>());
-		testNodeProcessingSchema(new Change<Long>());
-	}
+  /**
+   * Test node logic emits correct results.
+   */
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    testNodeProcessingSchema(new Change<Integer>());
+    testNodeProcessingSchema(new Change<Double>());
+    testNodeProcessingSchema(new Change<Float>());
+    testNodeProcessingSchema(new Change<Short>());
+    testNodeProcessingSchema(new Change<Long>());
+  }
 
-	/**
-	 *
-	 * @param oper  Data value for comparison.
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   *
+   * @param oper  Data value for comparison.
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   public <V extends Number> void testNodeProcessingSchema(Change<V> oper)
-	{
-		CollectorTestSink changeSink = new CollectorTestSink();
-		CollectorTestSink percentSink = new CollectorTestSink();
+  {
+    CollectorTestSink changeSink = new CollectorTestSink();
+    CollectorTestSink percentSink = new CollectorTestSink();
 
-		oper.change.setSink(changeSink);
-		oper.percent.setSink(percentSink);
+    oper.change.setSink(changeSink);
+    oper.percent.setSink(percentSink);
 
-		oper.beginWindow(0);
-		oper.base.process(oper.getValue(10));
-		oper.data.process(oper.getValue(5));
-		oper.data.process(oper.getValue(15));
-		oper.data.process(oper.getValue(20));
-		oper.endWindow();
+    oper.beginWindow(0);
+    oper.base.process(oper.getValue(10));
+    oper.data.process(oper.getValue(5));
+    oper.data.process(oper.getValue(15));
+    oper.data.process(oper.getValue(20));
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", 3,
-				changeSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 3,
-				percentSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 3,
+        changeSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 3,
+        percentSink.collectedTuples.size());
 
-		log.debug("\nLogging tuples");
-		for (Object o : changeSink.collectedTuples) {
-			log.debug(String.format("change %s", o));
-		}
-		for (Object o : percentSink.collectedTuples) {
-			log.debug(String.format("percent change %s", o));
-		}
-	}
+    log.debug("\nLogging tuples");
+    for (Object o : changeSink.collectedTuples) {
+      log.debug(String.format("change %s", o));
+    }
+    for (Object o : percentSink.collectedTuples) {
+      log.debug(String.format("percent change %s", o));
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/CompareExceptMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/CompareExceptMapTest.java b/library/src/test/java/com/datatorrent/lib/math/CompareExceptMapTest.java
index 085f0c8..46b9609 100644
--- a/library/src/test/java/com/datatorrent/lib/math/CompareExceptMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/CompareExceptMapTest.java
@@ -73,28 +73,24 @@ public class CompareExceptMapTest
 
     // One for each key
     Assert.assertEquals("number emitted tuples", 1, exceptSink.count);
-    for (Map.Entry<String, Number> e: ((HashMap<String, Number>)exceptSink.tuple).entrySet()) {
+    for (Map.Entry<String, Number> e : ((HashMap<String, Number>)exceptSink.tuple).entrySet()) {
       if (e.getKey().equals("a")) {
-        Assert.assertEquals("emitted value for 'a' was ", new Double(2), e.getValue().doubleValue(),0);
-      }
-      else if (e.getKey().equals("b")) {
-        Assert.assertEquals("emitted tuple for 'b' was ", new Double(20), e.getValue().doubleValue(),0);
-      }
-      else if (e.getKey().equals("c")) {
-        Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000), e.getValue().doubleValue(),0);
+        Assert.assertEquals("emitted value for 'a' was ", new Double(2), e.getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("b")) {
+        Assert.assertEquals("emitted tuple for 'b' was ", new Double(20), e.getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("c")) {
+        Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000), e.getValue().doubleValue(), 0);
       }
     }
 
     Assert.assertEquals("number emitted tuples", 1, compareSink.count);
-    for (Map.Entry<String, Number> e: ((HashMap<String, Number>)compareSink.tuple).entrySet()) {
+    for (Map.Entry<String, Number> e : ((HashMap<String, Number>)compareSink.tuple).entrySet()) {
       if (e.getKey().equals("a")) {
-        Assert.assertEquals("emitted value for 'a' was ", new Double(3), e.getValue().doubleValue(),0);
-      }
-      else if (e.getKey().equals("b")) {
-        Assert.assertEquals("emitted tuple for 'b' was ", new Double(21), e.getValue().doubleValue(),0);
-      }
-      else if (e.getKey().equals("c")) {
-        Assert.assertEquals("emitted tuple for 'c' was ", new Double(30), e.getValue().doubleValue(),0);
+        Assert.assertEquals("emitted value for 'a' was ", new Double(3), e.getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("b")) {
+        Assert.assertEquals("emitted tuple for 'b' was ", new Double(21), e.getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("c")) {
+        Assert.assertEquals("emitted tuple for 'c' was ", new Double(30), e.getValue().doubleValue(), 0);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/CompareMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/CompareMapTest.java b/library/src/test/java/com/datatorrent/lib/math/CompareMapTest.java
index 5e5ef15..c21019b 100644
--- a/library/src/test/java/com/datatorrent/lib/math/CompareMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/CompareMapTest.java
@@ -69,14 +69,12 @@ public class CompareMapTest
 
     // One for each key
     Assert.assertEquals("number emitted tuples", 1, matchSink.count);
-    for (Map.Entry<String, Number> e: ((HashMap<String, Number>)matchSink.tuple).entrySet()) {
+    for (Map.Entry<String, Number> e : ((HashMap<String, Number>)matchSink.tuple).entrySet()) {
       if (e.getKey().equals("a")) {
         Assert.assertEquals("emitted value for 'a' was ", new Double(2), e.getValue().doubleValue(), 0);
-      }
-      else if (e.getKey().equals("b")) {
+      } else if (e.getKey().equals("b")) {
         Assert.assertEquals("emitted tuple for 'b' was ", new Double(20), e.getValue().doubleValue(), 0);
-      }
-      else if (e.getKey().equals("c")) {
+      } else if (e.getKey().equals("c")) {
         Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000), e.getValue().doubleValue(), 0);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/CountKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/CountKeyValTest.java b/library/src/test/java/com/datatorrent/lib/math/CountKeyValTest.java
index 3b24fd3..0b1d9de 100644
--- a/library/src/test/java/com/datatorrent/lib/math/CountKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/CountKeyValTest.java
@@ -63,22 +63,18 @@ public class CountKeyValTest
 
     // payload should be 1 bag of tuples with keys "a", "b", "c", "d", "e"
     Assert.assertEquals("number emitted tuples", 5, countSink.collectedTuples.size());
-    for (Object o: countSink.collectedTuples) {
+    for (Object o : countSink.collectedTuples) {
       KeyValPair<String, Integer> e = (KeyValPair<String, Integer>)o;
       Integer val = (Integer)e.getValue();
       if (e.getKey().equals("a")) {
         Assert.assertEquals("emitted value for 'a' was ", 4, val.intValue());
-      }
-      else if (e.getKey().equals("b")) {
+      } else if (e.getKey().equals("b")) {
         Assert.assertEquals("emitted tuple for 'b' was ", 3, val.intValue());
-      }
-      else if (e.getKey().equals("c")) {
+      } else if (e.getKey().equals("c")) {
         Assert.assertEquals("emitted tuple for 'c' was ", 1, val.intValue());
-      }
-      else if (e.getKey().equals("d")) {
+      } else if (e.getKey().equals("d")) {
         Assert.assertEquals("emitted tuple for 'd' was ", 5, val.intValue());
-      }
-      else if (e.getKey().equals("e")) {
+      } else if (e.getKey().equals("e")) {
         Assert.assertEquals("emitted tuple for 'e' was ", 1, val.intValue());
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/CountOccuranceTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/CountOccuranceTest.java b/library/src/test/java/com/datatorrent/lib/math/CountOccuranceTest.java
index 9eac2d4..57b79d7 100644
--- a/library/src/test/java/com/datatorrent/lib/math/CountOccuranceTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/CountOccuranceTest.java
@@ -28,13 +28,13 @@ import com.datatorrent.lib.testbench.CountOccurance;
  */
 public class CountOccuranceTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  @SuppressWarnings({"rawtypes", "unchecked"})
   @Test
-	public void testProcess()
-	{
-		CountOccurance oper = new CountOccurance();
-		oper.setup(null);
-		CollectorTestSink sink = new CollectorTestSink();
+  public void testProcess()
+  {
+    CountOccurance oper = new CountOccurance();
+    oper.setup(null);
+    CollectorTestSink sink = new CollectorTestSink();
     oper.outport.setSink(sink);
 
     oper.beginWindow(1);
@@ -43,5 +43,5 @@ public class CountOccuranceTest
     oper.endWindow();
 
     Assert.assertEquals("number emitted tuples", 1, sink.collectedTuples.size());
-	}
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/DivisionTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/DivisionTest.java b/library/src/test/java/com/datatorrent/lib/math/DivisionTest.java
index fabdb38..9fe556f 100644
--- a/library/src/test/java/com/datatorrent/lib/math/DivisionTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/DivisionTest.java
@@ -23,7 +23,6 @@ import org.junit.Test;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
-
 /**
  *
  * Functional tests for {@link com.datatorrent.lib.math.Division}
@@ -32,70 +31,70 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class DivisionTest
 {
-	/**
-	 * Test operator logic emits correct results.
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  /**
+   * Test operator logic emits correct results.
+   */
+  @SuppressWarnings({"rawtypes", "unchecked"})
   @Test
-	public void testNodeProcessing()
-	{
-		Division oper = new Division();
-		CollectorTestSink lqSink = new CollectorTestSink();
-		CollectorTestSink iqSink = new CollectorTestSink();
-		CollectorTestSink dqSink = new CollectorTestSink();
-		CollectorTestSink fqSink = new CollectorTestSink();
-		CollectorTestSink lrSink = new CollectorTestSink();
-		CollectorTestSink irSink = new CollectorTestSink();
-		CollectorTestSink drSink = new CollectorTestSink();
-		CollectorTestSink frSink = new CollectorTestSink();
-		CollectorTestSink eSink = new CollectorTestSink();
+  public void testNodeProcessing()
+  {
+    Division oper = new Division();
+    CollectorTestSink lqSink = new CollectorTestSink();
+    CollectorTestSink iqSink = new CollectorTestSink();
+    CollectorTestSink dqSink = new CollectorTestSink();
+    CollectorTestSink fqSink = new CollectorTestSink();
+    CollectorTestSink lrSink = new CollectorTestSink();
+    CollectorTestSink irSink = new CollectorTestSink();
+    CollectorTestSink drSink = new CollectorTestSink();
+    CollectorTestSink frSink = new CollectorTestSink();
+    CollectorTestSink eSink = new CollectorTestSink();
 
-		oper.longQuotient.setSink(lqSink);
-		oper.integerQuotient.setSink(iqSink);
-		oper.doubleQuotient.setSink(dqSink);
-		oper.floatQuotient.setSink(fqSink);
-		oper.longRemainder.setSink(lrSink);
-		oper.doubleRemainder.setSink(drSink);
-		oper.floatRemainder.setSink(frSink);
-		oper.integerRemainder.setSink(irSink);
-		oper.errordata.setSink(eSink);
+    oper.longQuotient.setSink(lqSink);
+    oper.integerQuotient.setSink(iqSink);
+    oper.doubleQuotient.setSink(dqSink);
+    oper.floatQuotient.setSink(fqSink);
+    oper.longRemainder.setSink(lrSink);
+    oper.doubleRemainder.setSink(drSink);
+    oper.floatRemainder.setSink(frSink);
+    oper.integerRemainder.setSink(irSink);
+    oper.errordata.setSink(eSink);
 
-		oper.beginWindow(0); //
-		oper.denominator.process(5);
-		oper.numerator.process(11);
-		oper.denominator.process(0);
-		oper.endWindow(); //
+    oper.beginWindow(0); //
+    oper.denominator.process(5);
+    oper.numerator.process(11);
+    oper.denominator.process(0);
+    oper.endWindow(); //
 
-		Assert.assertEquals("number emitted tuples", 1,
-				lqSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				iqSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				dqSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				fqSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				lrSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				irSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				drSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				frSink.collectedTuples.size());
-		Assert.assertEquals("number emitted tuples", 1,
-				eSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        lqSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        iqSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        dqSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        fqSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        lrSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        irSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        drSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        frSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", 1,
+        eSink.collectedTuples.size());
 
-		Assert.assertEquals("quotient is", new Long(2),
-				lqSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", 2, iqSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", 2.2, dqSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", new Float(2.2),
-				fqSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", new Long(1),
-				lrSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", 1, irSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", 1.0, drSink.collectedTuples.get(0));
-		Assert.assertEquals("quotient is", new Float(1.0),
-				frSink.collectedTuples.get(0));
-	}
+    Assert.assertEquals("quotient is", new Long(2),
+        lqSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", 2, iqSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", 2.2, dqSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", new Float(2.2),
+        fqSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", new Long(1),
+        lrSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", 1, irSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", 1.0, drSink.collectedTuples.get(0));
+    Assert.assertEquals("quotient is", new Float(1.0),
+        frSink.collectedTuples.get(0));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/math/ExceptMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/ExceptMapTest.java b/library/src/test/java/com/datatorrent/lib/math/ExceptMapTest.java
index 2751842..a113d5f 100644
--- a/library/src/test/java/com/datatorrent/lib/math/ExceptMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/ExceptMapTest.java
@@ -66,19 +66,17 @@ public class ExceptMapTest
 
     // One for each key
     Assert.assertEquals("number emitted tuples", 1, exceptSink.count);
-    for (Map.Entry<String, Number> e : ((HashMap<String, Number>) exceptSink.tuple)
-      .entrySet()) {
+    for (Map.Entry<String, Number> e : ((HashMap<String, Number>)exceptSink.tuple)
+        .entrySet()) {
       if (e.getKey().equals("a")) {
         Assert.assertEquals("emitted value for 'a' was ", new Double(2), e
-          .getValue().doubleValue(), 0);
-      }
-      else if (e.getKey().equals("b")) {
+            .getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("b")) {
         Assert.assertEquals("emitted tuple for 'b' was ", new Double(20), e
-          .getValue().doubleValue(), 0);
-      }
-      else if (e.getKey().equals("c")) {
+            .getValue().doubleValue(), 0);
+      } else if (e.getKey().equals("c")) {
         Assert.assertEquals("emitted tuple for 'c' was ", new Double(1000), e
-          .getValue().doubleValue(), 0);
+            .getValue().doubleValue(), 0);
       }
     }
   }


[11/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/function/FirstLastFunction.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/function/FirstLastFunction.java b/library/src/main/java/com/datatorrent/lib/streamquery/function/FirstLastFunction.java
index 0a4fd1a..db2c2b7 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/function/FirstLastFunction.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/function/FirstLastFunction.java
@@ -51,7 +51,7 @@ public class FirstLastFunction extends FunctionIndex
    * @param  alias   Alias name for output.
    * @param  isFirst return first value if true.
    */
-  public FirstLastFunction(@NotNull String column,String alias, boolean isLast)
+  public FirstLastFunction(@NotNull String column, String alias, boolean isLast)
   {
     super(column, alias);
     isFirst = !isLast;
@@ -63,14 +63,20 @@ public class FirstLastFunction extends FunctionIndex
   @Override
   public Object compute(@NotNull ArrayList<Map<String, Object>> rows) throws Exception
   {
-    if (rows.size() == 0) return null;
+    if (rows.size() == 0) {
+      return null;
+    }
     if (isFirst) {
-      for (int i=0; i < rows.size(); i++) {
-        if (rows.get(i).get(column) != null) return rows.get(i).get(column);
+      for (int i = 0; i < rows.size(); i++) {
+        if (rows.get(i).get(column) != null) {
+          return rows.get(i).get(column);
+        }
       }
     } else {
-      for (int i= (rows.size()-1); i >= 0;  i--) {
-        if (rows.get(i).get(column) != null) return rows.get(i).get(column);
+      for (int i = (rows.size() - 1); i >= 0; i--) {
+        if (rows.get(i).get(column) != null) {
+          return rows.get(i).get(column);
+        }
       }
     }
     return null;
@@ -83,9 +89,11 @@ public class FirstLastFunction extends FunctionIndex
   @Override
   protected String aggregateName()
   {
-    if (!StringUtils.isEmpty(alias)) return alias;
+    if (!StringUtils.isEmpty(alias)) {
+      return alias;
+    }
     if (isFirst) {
-        return "FIRST(" + column + ")";
+      return "FIRST(" + column + ")";
     }
     return "LAST(" + column + ")";
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/function/FunctionIndex.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/function/FunctionIndex.java b/library/src/main/java/com/datatorrent/lib/streamquery/function/FunctionIndex.java
index 57376e4..918ca89 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/function/FunctionIndex.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/function/FunctionIndex.java
@@ -36,7 +36,7 @@ import javax.validation.constraints.NotNull;
  * @tags sql aggregate
  * @since 0.3.4
  */
-abstract public class FunctionIndex
+public abstract class FunctionIndex
 {
   /**
    * Column name.
@@ -64,13 +64,13 @@ abstract public class FunctionIndex
    * @param rows Tuple list over application window.
    * @return aggregate result object.
    */
-  abstract public Object compute(@NotNull ArrayList<Map<String, Object>> rows) throws Exception;
+  public abstract Object compute(@NotNull ArrayList<Map<String, Object>> rows) throws Exception;
 
   /**
    * Get aggregate output value name.
    * @return name string.
    */
-  abstract protected String aggregateName();
+  protected abstract String aggregateName();
 
   /**
    * Apply compute function to given rows and store result in collect by output value name.
@@ -78,10 +78,16 @@ abstract public class FunctionIndex
    */
   public void filter(ArrayList<Map<String, Object>> rows, Map<String, Object> collect) throws Exception
   {
-    if (rows == null) return;
+    if (rows == null) {
+      return;
+    }
     String name = column;
-    if (alias != null) name = alias;
-    if (name == null) name = aggregateName();
+    if (alias != null) {
+      name = alias;
+    }
+    if (name == null) {
+      name = aggregateName();
+    }
     collect.put(name, compute(rows));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/function/MaxMinFunction.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/function/MaxMinFunction.java b/library/src/main/java/com/datatorrent/lib/streamquery/function/MaxMinFunction.java
index 596e080..f02e82c 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/function/MaxMinFunction.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/function/MaxMinFunction.java
@@ -67,8 +67,10 @@ public class MaxMinFunction extends FunctionIndex
     double minMax = 0.0;
     for (Map<String, Object> row : rows) {
       double value = ((Number)row.get(column)).doubleValue();
-      if ((isMax && (minMax < value))||(!isMax && (minMax > value))) minMax = value;
-     }
+      if ((isMax && (minMax < value)) || (!isMax && (minMax > value))) {
+        minMax = value;
+      }
+    }
     return minMax;
   }
 
@@ -79,8 +81,12 @@ public class MaxMinFunction extends FunctionIndex
   @Override
   protected String aggregateName()
   {
-    if (!StringUtils.isEmpty(alias)) return alias;
-    if (isMax) return "MAX(" + column + ")";
+    if (!StringUtils.isEmpty(alias)) {
+      return alias;
+    }
+    if (isMax) {
+      return "MAX(" + column + ")";
+    }
     return "MIN(" + column + ")";
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/function/SumFunction.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/function/SumFunction.java b/library/src/main/java/com/datatorrent/lib/streamquery/function/SumFunction.java
index 484aa95..02186cd 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/function/SumFunction.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/function/SumFunction.java
@@ -45,8 +45,10 @@ public class SumFunction extends FunctionIndex
   {
     Double result = 0.0;
     for (Map<String, Object> row : rows) {
-        if (!row.containsKey(column)) continue;
-        result += ((Number)row.get(column)).doubleValue();
+      if (!row.containsKey(column)) {
+        continue;
+      }
+      result += ((Number)row.get(column)).doubleValue();
     }
     return result;
   }
@@ -54,7 +56,7 @@ public class SumFunction extends FunctionIndex
   @Override
   protected String aggregateName()
   {
-   return "Sum(" + column;
+    return "Sum(" + column;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/BinaryExpression.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/BinaryExpression.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/BinaryExpression.java
index 23fa86b..21c1d11 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/BinaryExpression.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/BinaryExpression.java
@@ -30,7 +30,7 @@ import javax.validation.constraints.NotNull;
  * @tags alias
  * @since 0.3.4
  */
-abstract public class BinaryExpression  implements Index
+public abstract class BinaryExpression  implements Index
 {
   /**
    * Left column name argument for expression.
@@ -50,9 +50,9 @@ abstract public class BinaryExpression  implements Index
   protected String alias;
 
   /**
-   * @param Left column name argument for expression.
-   * @param Right column name argument for expression.
-   * @param Alias name for output field.
+   * @param left column name argument for expression.
+   * @param right column name argument for expression.
+   * @param alias name for output field.
    */
   public BinaryExpression(@NotNull String left, @NotNull String right, String alias)
   {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/ColumnIndex.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/ColumnIndex.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/ColumnIndex.java
index 78cc547..a4ad2b7 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/ColumnIndex.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/ColumnIndex.java
@@ -53,7 +53,9 @@ public class ColumnIndex implements Index
   public void filter(@NotNull  Map<String, Object> row, @NotNull  Map<String, Object> collect)
   {
     String name = getColumn();
-    if (alias != null) name = alias;
+    if (alias != null) {
+      name = alias;
+    }
     collect.put(name, row.get(name));
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/Index.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/Index.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/Index.java
index 890185b..5067d00 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/Index.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/Index.java
@@ -35,5 +35,5 @@ public interface Index
   /**
    * Function can key/value hash map, does metric implemented by sub class.
    */
-	public void filter(@NotNull Map<String,Object> row, @NotNull Map<String, Object> collect);
+  public void filter(@NotNull Map<String, Object> row, @NotNull Map<String, Object> collect);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/MidIndex.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/MidIndex.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/MidIndex.java
index 89456b2..931ddaa 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/MidIndex.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/MidIndex.java
@@ -38,22 +38,26 @@ public class MidIndex extends ColumnIndex
   public MidIndex(@NotNull String column, String alias, int start)
   {
     super(column, alias);
-    assert(start >= 0);
+    assert (start >= 0);
     this.start = start;
   }
 
   @Override
   public void filter(@NotNull  Map<String, Object> row, @NotNull  Map<String, Object> collect)
   {
-    if (!row.containsKey(column)) return;
+    if (!row.containsKey(column)) {
+      return;
+    }
     if (!(row.get(column) instanceof String)) {
-      assert(false);
+      assert (false);
     }
     String name = getColumn();
-    if (alias != null) name = alias;
+    if (alias != null) {
+      name = alias;
+    }
 
     int endIndex = start + length;
-    if ((length == 0)||(endIndex > ((String)row.get(column)).length())) {
+    if ((length == 0) || (endIndex > ((String)row.get(column)).length())) {
       collect.put(name, row.get(column));
     } else {
       collect.put(name, ((String)row.get(column)).substring(start, endIndex));
@@ -67,7 +71,7 @@ public class MidIndex extends ColumnIndex
 
   public void setLength(int length)
   {
-    assert(length > 0);
+    assert (length > 0);
     this.length = length;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/NegateExpression.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/NegateExpression.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/NegateExpression.java
index 792ad80..969e3af 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/NegateExpression.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/NegateExpression.java
@@ -40,7 +40,9 @@ public class NegateExpression extends UnaryExpression
   public NegateExpression(@Null String column, String alias)
   {
     super(column, alias);
-    if (this.alias == null)  this.alias = "NEGATE(" + column + ")";
+    if (this.alias == null) {
+      this.alias = "NEGATE(" + column + ")";
+    }
   }
 
   /* (non-Javadoc)
@@ -49,7 +51,9 @@ public class NegateExpression extends UnaryExpression
   @Override
   public void filter(Map<String, Object> row, Map<String, Object> collect)
   {
-    if (!row.containsKey(column)) return;
+    if (!row.containsKey(column)) {
+      return;
+    }
     collect.put(alias, -((Number)row.get(column)).doubleValue());
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/RoundDoubleIndex.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/RoundDoubleIndex.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/RoundDoubleIndex.java
index 46a563f..90e16a1 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/RoundDoubleIndex.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/RoundDoubleIndex.java
@@ -37,17 +37,23 @@ public class RoundDoubleIndex  extends ColumnIndex
   {
     super(column, alias);
     rounder = 1;
-    if (numDecimals > 0) rounder = (int) Math.pow(10, numDecimals);
+    if (numDecimals > 0) {
+      rounder = (int)Math.pow(10, numDecimals);
+    }
   }
 
   @Override
   public void filter(@NotNull  Map<String, Object> row, @NotNull  Map<String, Object> collect)
   {
-    if (!row.containsKey(column)) return;
-    double value = (Double) row.get(column);
-    value = Math.round(value * rounder)/rounder;
+    if (!row.containsKey(column)) {
+      return;
+    }
+    double value = (Double)row.get(column);
+    value = Math.round(value * rounder) / rounder;
     String name = getColumn();
-    if (alias != null) name = alias;
+    if (alias != null) {
+      name = alias;
+    }
     collect.put(name, value);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/StringCaseIndex.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/StringCaseIndex.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/StringCaseIndex.java
index 2d792ff..2c49a79 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/StringCaseIndex.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/StringCaseIndex.java
@@ -42,13 +42,17 @@ public class StringCaseIndex extends  ColumnIndex
   @Override
   public void filter(@NotNull  Map<String, Object> row, @NotNull  Map<String, Object> collect)
   {
-    if (!row.containsKey(column)) return;
+    if (!row.containsKey(column)) {
+      return;
+    }
     if (!(row.get(column) instanceof String)) {
-      assert(false);
+      assert (false);
     }
 
     String name = getColumn();
-    if (alias != null) name = alias;
+    if (alias != null) {
+      name = alias;
+    }
     if (toUpperCase) {
       collect.put(name, ((String)row.get(column)).toUpperCase());
     } else {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/StringLenIndex.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/StringLenIndex.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/StringLenIndex.java
index 4fa05b5..4dbfee1 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/StringLenIndex.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/StringLenIndex.java
@@ -40,13 +40,17 @@ public class StringLenIndex  extends ColumnIndex
   @Override
   public void filter(@NotNull  Map<String, Object> row, @NotNull  Map<String, Object> collect)
   {
-    if (!row.containsKey(column)) return;
+    if (!row.containsKey(column)) {
+      return;
+    }
     if (!(row.get(column) instanceof String)) {
-      assert(false);
+      assert (false);
     }
 
     String name = getColumn();
-    if (alias != null) name = alias;
+    if (alias != null) {
+      name = alias;
+    }
     collect.put(name, ((String)row.get(column)).length());
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/SumExpression.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/SumExpression.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/SumExpression.java
index acddf51..a0144da 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/SumExpression.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/SumExpression.java
@@ -36,14 +36,16 @@ public class SumExpression extends BinaryExpression
 {
 
   /**
-   * @param Left column name argument for expression.
-   * @param Right column name argument for expression.
-   * @param Alias name for output field.
+   * @param left column name argument for expression.
+   * @param right column name argument for expression.
+   * @param alias name for output field.
    */
   public SumExpression(@NotNull String left, @NotNull String right, String alias)
   {
     super(left, right, alias);
-    if (this.alias == null) this.alias = "SUM(" + left + "," + right + ")";
+    if (this.alias == null) {
+      this.alias = "SUM(" + left + "," + right + ")";
+    }
   }
 
   /* sum column values.
@@ -52,7 +54,9 @@ public class SumExpression extends BinaryExpression
   @Override
   public void filter(Map<String, Object> row, Map<String, Object> collect)
   {
-    if (!row.containsKey(left) || !row.containsKey(right)) return;
+    if (!row.containsKey(left) || !row.containsKey(right)) {
+      return;
+    }
     collect.put(alias, ((Number)row.get(left)).doubleValue() + ((Number)row.get(right)).doubleValue());
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/index/UnaryExpression.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/index/UnaryExpression.java b/library/src/main/java/com/datatorrent/lib/streamquery/index/UnaryExpression.java
index ea52986..45e90ec 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/index/UnaryExpression.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/index/UnaryExpression.java
@@ -31,7 +31,7 @@ import javax.validation.constraints.NotNull;
  * @tags unary, alias
  * @since 0.3.4
  */
-abstract public class UnaryExpression  implements Index
+public abstract class UnaryExpression  implements Index
 {
   /**
    * Column name argument for unary expression.
@@ -45,8 +45,8 @@ abstract public class UnaryExpression  implements Index
   protected String alias;
 
   /**
-   * @param Column name argument for unary expression.
-   * @param Alias name for output field.
+   * @param column name argument for unary expression.
+   * @param alias name for output field.
    */
   public UnaryExpression(@NotNull String column, String alias)
   {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/ArrayListTestSink.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/ArrayListTestSink.java b/library/src/main/java/com/datatorrent/lib/testbench/ArrayListTestSink.java
index c6df099..93889be 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/ArrayListTestSink.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/ArrayListTestSink.java
@@ -18,12 +18,13 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Sink;
-
 import java.util.ArrayList;
 import java.util.HashMap;
+
 import org.apache.commons.lang.mutable.MutableInt;
 
+import com.datatorrent.api.Sink;
+
 /**
  * A sink implementation to collect expected test results in a HashMap.
  * <p>
@@ -61,7 +62,7 @@ public class ArrayListTestSink<T> implements Sink<T>
   {
     this.count++;
     @SuppressWarnings("unchecked")
-    ArrayList<Object> list = (ArrayList<Object>) tuple;
+    ArrayList<Object> list = (ArrayList<Object>)tuple;
     for (Object o: list) {
       MutableInt val = map.get(o);
       if (val == null) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/CollectorTestSink.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/CollectorTestSink.java b/library/src/main/java/com/datatorrent/lib/testbench/CollectorTestSink.java
index 778a82a..2fd776e 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/CollectorTestSink.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/CollectorTestSink.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Sink;
-
 import java.util.ArrayList;
 import java.util.List;
 
+import com.datatorrent.api.Sink;
+
 /**
  * A sink implementation to collect expected test results.
  * <p>
@@ -33,7 +33,7 @@ import java.util.List;
  */
 public class CollectorTestSink<T> implements Sink<T>
 {
-  final public List<T> collectedTuples = new ArrayList<T>();
+  public final List<T> collectedTuples = new ArrayList<T>();
 
   /**
    * clears data
@@ -46,10 +46,10 @@ public class CollectorTestSink<T> implements Sink<T>
   @Override
   public void put(T payload)
   {
-      synchronized (collectedTuples) {
-        collectedTuples.add(payload);
-        collectedTuples.notifyAll();
-      }
+    synchronized (collectedTuples) {
+      collectedTuples.add(payload);
+      collectedTuples.notifyAll();
+    }
   }
 
   public void waitForResultCount(int count, long timeoutMillis) throws InterruptedException

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/CompareFilterTuples.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/CompareFilterTuples.java b/library/src/main/java/com/datatorrent/lib/testbench/CompareFilterTuples.java
index d338d34..a309b89 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/CompareFilterTuples.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/CompareFilterTuples.java
@@ -21,9 +21,9 @@ package com.datatorrent.lib.testbench;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * <p>Implements Compare Filter Tuples class.</p>
@@ -35,9 +35,12 @@ import com.datatorrent.api.DefaultOutputPort;
  */
 public class CompareFilterTuples<k> extends BaseOperator
 {
-	// Compare type function
+  // Compare type function
   private Compare compareType = Compare.Equal;
-  public enum Compare { Smaller, Equal, Greater }
+  public enum Compare
+  {
+    Smaller, Equal, Greater
+  }
 
   /**
    * Compare the incoming value with the Property value.
@@ -45,69 +48,85 @@ public class CompareFilterTuples<k> extends BaseOperator
   */
   public void setCompareType(Compare type)
   {
-  	compareType = type;
+    compareType = type;
   }
 
   // compare value
   private int value;
   public void setValue(int value)
   {
-  	this.value = value;
+    this.value = value;
   }
 
   // Collected result tuples
   private Map<k, Integer> result;
 
-        /**
-	 * Input port that takes a map of integer values.
-	 */
-	public final transient DefaultInputPort<Map<k, Integer>> inport = new DefaultInputPort<Map<k, Integer>>() {
+  /**
+   * Input port that takes a map of integer values.
+   */
+
+  public final transient DefaultInputPort<Map<k, Integer>> inport = new DefaultInputPort<Map<k, Integer>>()
+  {
     @Override
-    public void process(Map<k, Integer> map) {
-    	for(Map.Entry<k, Integer> entry : map.entrySet())
-    	{
-    		if ( compareType == Compare.Equal ) if(entry.getValue().intValue() == value) result.put(entry.getKey(), entry.getValue());
-    		if ( compareType == Compare.Greater ) if(entry.getValue().intValue() > value) result.put(entry.getKey(), entry.getValue());
-    		if ( compareType == Compare.Smaller ) if(entry.getValue().intValue() < value) result.put(entry.getKey(), entry.getValue());
-    	}
+    public void process(Map<k, Integer> map)
+    {
+      for (Map.Entry<k, Integer> entry : map.entrySet()) {
+        if (compareType == Compare.Equal) {
+          if (entry.getValue().intValue() == value) {
+            result.put(entry.getKey(),
+                entry.getValue());
+          }
+        }
+        if (compareType == Compare.Greater) {
+          if (entry.getValue().intValue() > value) {
+            result.put(entry.getKey(),
+                entry.getValue());
+          }
+        }
+        if (compareType == Compare.Smaller) {
+          if (entry.getValue().intValue() < value) {
+            result.put(entry.getKey(),
+                entry.getValue());
+          }
+        }
+      }
     }
-	};
+  };
 
-	/**
-	 * Output port that emits a map of integer values.
-	 */
-	public final transient DefaultOutputPort<Map<k, Integer>> outport = new DefaultOutputPort<Map<k, Integer>>();
+  /**
+   * Output port that emits a map of integer values.
+   */
+  public final transient DefaultOutputPort<Map<k, Integer>> outport = new DefaultOutputPort<Map<k, Integer>>();
 
         /**
-	 * Output redis port that emits a map of &lt;integer,string&gt; values.
-	 */
-	public final transient DefaultOutputPort<Map<Integer, String>> redisport = new DefaultOutputPort<Map<Integer, String>>();
+   * Output redis port that emits a map of &lt;integer,string&gt; values.
+   */
+  public final transient DefaultOutputPort<Map<Integer, String>> redisport = new DefaultOutputPort<Map<Integer, String>>();
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		result  = new HashMap<k, Integer>();
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    result  = new HashMap<k, Integer>();
+  }
 
-	@Override
-	public void endWindow()
-	{
-		outport.emit(result);
+  @Override
+  public void endWindow()
+  {
+    outport.emit(result);
 
-		int numOuts = 1;
-		Integer total = 0;
-		for (Map.Entry<k, Integer>  entry : result.entrySet())
-		{
-			Map<Integer, String> tuple = new HashMap<Integer, String>();
-			tuple.put(numOuts++, entry.getKey().toString());
-			redisport.emit(tuple);
-			total += entry.getValue();
-		}
-		Map<Integer, String> tuple = new HashMap<Integer, String>();
-		tuple.put(numOuts++, total.toString());
-		redisport.emit(tuple);
-		tuple = new HashMap<Integer, String>();
-		tuple.put(0, new Integer(numOuts).toString());
-		redisport.emit(tuple);
-	}
+    int numOuts = 1;
+    Integer total = 0;
+    for (Map.Entry<k, Integer>  entry : result.entrySet()) {
+      Map<Integer, String> tuple = new HashMap<Integer, String>();
+      tuple.put(numOuts++, entry.getKey().toString());
+      redisport.emit(tuple);
+      total += entry.getValue();
+    }
+    Map<Integer, String> tuple = new HashMap<Integer, String>();
+    tuple.put(numOuts++, total.toString());
+    redisport.emit(tuple);
+    tuple = new HashMap<Integer, String>();
+    tuple.put(0, new Integer(numOuts).toString());
+    redisport.emit(tuple);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/CountAndLastTupleTestSink.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/CountAndLastTupleTestSink.java b/library/src/main/java/com/datatorrent/lib/testbench/CountAndLastTupleTestSink.java
index 9e4f8b5..73506ae 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/CountAndLastTupleTestSink.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/CountAndLastTupleTestSink.java
@@ -43,7 +43,7 @@ public class CountAndLastTupleTestSink<T> extends CountTestSink<T>
   @Override
   public void put(T tuple)
   {
-      this.tuple = tuple;
-      count++;
+    this.tuple = tuple;
+    count++;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/CountOccurance.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/CountOccurance.java b/library/src/main/java/com/datatorrent/lib/testbench/CountOccurance.java
index 29343c9..9f6df10 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/CountOccurance.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/CountOccurance.java
@@ -22,10 +22,10 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * <p>A base implementation of an operator which does count occurrence.</p>
@@ -37,70 +37,70 @@ import com.datatorrent.api.Context.OperatorContext;
  */
 public class CountOccurance<k> extends BaseOperator
 {
-	private Map<k, Integer> collect;
-	public final transient DefaultInputPort<k> inport = new DefaultInputPort<k>() {
+  private Map<k, Integer> collect;
+  public final transient DefaultInputPort<k> inport = new DefaultInputPort<k>()
+  {
     @Override
-    public void process(k s) {
-    	if (collect.containsKey(s))
-    	{
-    		Integer value = (Integer)collect.remove(s);
-    		collect.put(s, new Integer(value+1));
-    	} else {
-    		collect.put(s, new Integer(1));
-    	}
+    public void process(k s)
+    {
+      if (collect.containsKey(s)) {
+        Integer value = (Integer)collect.remove(s);
+        collect.put(s, new Integer(value + 1));
+      } else {
+        collect.put(s, new Integer(1));
+      }
     }
-	};
+  };
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
 
-	@Override
-	public void teardown()
-	{
-	}
+  @Override
+  public void teardown()
+  {
+  }
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		collect  = new HashMap<k, Integer>();
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    collect  = new HashMap<k, Integer>();
+  }
 
-	/**
-	 * Output port that emits a map of integer values.
-	 */
-	public final transient DefaultOutputPort<Map<k, Integer>> outport = new DefaultOutputPort<Map<k, Integer>>();
+  /**
+   * Output port that emits a map of integer values.
+   */
+  public final transient DefaultOutputPort<Map<k, Integer>> outport = new DefaultOutputPort<Map<k, Integer>>();
 
         /**
-	 * Output dimensions port that emits a map of &lt;string,object&gt; values.
-	 */
-	public final transient DefaultOutputPort<Map<String, Object>> dimensionOut = new DefaultOutputPort<Map<String, Object>>();
+   * Output dimensions port that emits a map of &lt;string,object&gt; values.
+   */
+  public final transient DefaultOutputPort<Map<String, Object>> dimensionOut = new DefaultOutputPort<Map<String, Object>>();
 
-        /**
-	 * Output total port that emits a map of &lt;string,integer&gt; count values.
-	 */
-        public final transient DefaultOutputPort<Map<String,Integer>> total = new DefaultOutputPort<Map<String,Integer>>();
+  /**
+   * Output total port that emits a map of &lt;string,integer&gt; count values.
+   */
+  public final transient DefaultOutputPort<Map<String, Integer>> total = new DefaultOutputPort<Map<String, Integer>>();
 
-	@Override
-	public void endWindow()
-	{
-		outport.emit(collect);
-		long timestamp = new Date().getTime();
-		int allcount = 0;
-		for(Map.Entry<k, Integer> entry : collect.entrySet())
-		{
-			Map<String, Object> map = new HashMap<String, Object>();
-			map.put("timestamp", timestamp);
-			map.put("item", entry.getKey());
-			map.put("view", entry.getValue());
-			dimensionOut.emit(map);
-			allcount += entry.getValue();
-		}
-		Map<String, Integer> map = new HashMap<String, Integer>();
-		map.put("total", new Integer(allcount));
-		total.emit(map);
-		collect = null;
-		collect  = new HashMap<k, Integer>();
-	}
+  @Override
+  public void endWindow()
+  {
+    outport.emit(collect);
+    long timestamp = new Date().getTime();
+    int allcount = 0;
+    for (Map.Entry<k, Integer> entry : collect.entrySet()) {
+      Map<String, Object> map = new HashMap<String, Object>();
+      map.put("timestamp", timestamp);
+      map.put("item", entry.getKey());
+      map.put("view", entry.getValue());
+      dimensionOut.emit(map);
+      allcount += entry.getValue();
+    }
+    Map<String, Integer> map = new HashMap<String, Integer>();
+    map.put("total", new Integer(allcount));
+    total.emit(map);
+    collect = null;
+    collect  = new HashMap<k, Integer>();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/CountTestSink.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/CountTestSink.java b/library/src/main/java/com/datatorrent/lib/testbench/CountTestSink.java
index cc7a70b..e6a85c9 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/CountTestSink.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/CountTestSink.java
@@ -43,12 +43,11 @@ public class CountTestSink<T> extends CollectorTestSink<T>
   }
 
   /**
-   *
    * @param payload
    */
   @Override
   public void put(T payload)
   {
-      count++;
+    count++;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/EventClassifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/EventClassifier.java b/library/src/main/java/com/datatorrent/lib/testbench/EventClassifier.java
index db02f22..547340e 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/EventClassifier.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/EventClassifier.java
@@ -18,16 +18,16 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * An implementation of BaseOperator that creates a load with pair of keys by taking in an input stream event and adding to incoming keys
  * to create a new tuple that is emitted on output port data.
@@ -72,51 +72,47 @@ public class EventClassifier extends BaseOperator
     @Override
     public void process(HashMap<String, Double> tuple)
     {
-    for (Map.Entry<String, Double> e: tuple.entrySet()) {
-      String inkey = e.getKey();
-      ArrayList<Integer> alist = null;
-      if (inkeys != null) {
-        alist = inkeys.get(e.getKey());
-      }
-      if (alist == null) {
-        alist = noweight;
-      }
-
-      // now alist are the weights
-      int rval = random.nextInt(alist.get(alist.size() - 1));
-      int j = 0;
-      int wval = 0;
-      for (Integer ew: alist) {
-        wval += ew.intValue();
-        if (wval >= rval) {
-          break;
-        }
-        j++;
-      }
-      HashMap<String, Double> otuple = new HashMap<String, Double>(1);
-      String key = wtostr_index.get(j); // the key
-      Double keyval = null;
-      if (hasvalues) {
-        if (voper == value_operation.VOPR_REPLACE) { // replace the incoming value
-          keyval = keys.get(key);
+      for (Map.Entry<String, Double> e : tuple.entrySet()) {
+        String inkey = e.getKey();
+        ArrayList<Integer> alist = null;
+        if (inkeys != null) {
+          alist = inkeys.get(e.getKey());
         }
-        else if (voper == value_operation.VOPR_ADD) {
-          keyval = keys.get(key) + e.getValue();
+        if (alist == null) {
+          alist = noweight;
         }
-        else if (voper == value_operation.VOPR_MULT) {
-          keyval = keys.get(key) * e.getValue();
 
+        // now alist are the weights
+        int rval = random.nextInt(alist.get(alist.size() - 1));
+        int j = 0;
+        int wval = 0;
+        for (Integer ew : alist) {
+          wval += ew.intValue();
+          if (wval >= rval) {
+            break;
+          }
+          j++;
         }
-        else if (voper == value_operation.VOPR_APPEND) { // not supported yet
-          keyval = keys.get(key);
+        HashMap<String, Double> otuple = new HashMap<String, Double>(1);
+        String key = wtostr_index.get(j); // the key
+        Double keyval = null;
+        if (hasvalues) {
+          if (voper == value_operation.VOPR_REPLACE) { // replace the incoming value
+            keyval = keys.get(key);
+          } else if (voper == value_operation.VOPR_ADD) {
+            keyval = keys.get(key) + e.getValue();
+          } else if (voper == value_operation.VOPR_MULT) {
+            keyval = keys.get(key) * e.getValue();
+
+          } else if (voper == value_operation.VOPR_APPEND) { // not supported yet
+            keyval = keys.get(key);
+          }
+        } else { // pass on the value from incoming tuple
+          keyval = e.getValue();
         }
+        otuple.put(key + "," + inkey, keyval);
+        data.emit(otuple);
       }
-      else { // pass on the value from incoming tuple
-        keyval = e.getValue();
-      }
-      otuple.put(key + "," + inkey, keyval);
-      data.emit(otuple);
-    }
     }
   };
 
@@ -124,7 +120,6 @@ public class EventClassifier extends BaseOperator
    * Output data port that emits a hashmap of &lt;string,double&gt;.
    */
   public final transient DefaultOutputPort<HashMap<String, Double>> data = new DefaultOutputPort<HashMap<String, Double>>();
-;
 
   HashMap<String, Double> keys = new HashMap<String, Double>();
   HashMap<Integer, String> wtostr_index = new HashMap<Integer, String>();
@@ -139,7 +134,8 @@ public class EventClassifier extends BaseOperator
   enum value_operation
   {
     VOPR_REPLACE, VOPR_ADD, VOPR_MULT, VOPR_APPEND
-  };
+  }
+
   value_operation voper = value_operation.VOPR_REPLACE;
 
 
@@ -163,7 +159,7 @@ public class EventClassifier extends BaseOperator
     voper = value_operation.VOPR_MULT;
   }
 
-   public void setKeyWeights(HashMap<String, ArrayList<Integer>> map)
+  public void setKeyWeights(HashMap<String, ArrayList<Integer>> map)
   {
     if (inkeys == null) {
       inkeys = new HashMap<String, ArrayList<Integer>>();
@@ -183,7 +179,7 @@ public class EventClassifier extends BaseOperator
     }
   }
 
-   @Override
+  @Override
   public void setup(OperatorContext context)
   {
     noweight = new ArrayList<Integer>();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/EventClassifierNumberToHashDouble.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/EventClassifierNumberToHashDouble.java b/library/src/main/java/com/datatorrent/lib/testbench/EventClassifierNumberToHashDouble.java
index 0158f3b..66ad12e 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/EventClassifierNumberToHashDouble.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/EventClassifierNumberToHashDouble.java
@@ -18,16 +18,18 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.HashMap;
+
 import javax.validation.constraints.NotNull;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * An implementation of BaseOperator that creates a load with pair of keys by taking in an input stream event and adding to incoming keys
  * to create a new tuple of Hashmap &lt;String,Double&gt; that is emitted on output port data.
@@ -95,7 +97,7 @@ public class EventClassifierNumberToHashDouble<K extends Number> extends BaseOpe
   int seed = 0;
   int seed_size = 1;
 
-  String [] keys = null;
+  String[] keys = null;
 
   /**
    * setup before dag is run (pre-runtime, and post compile time)
@@ -116,8 +118,7 @@ public class EventClassifierNumberToHashDouble<K extends Number> extends BaseOpe
         Integer ival = i;
         keys[i] = getKey() + ival.toString();
       }
-    }
-    else {
+    } else {
       for (int i = s_end; i <= s_start; i++) {
         Integer ival = i;
         keys[i] = getKey() + ival.toString();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/EventGenerator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/EventGenerator.java b/library/src/main/java/com/datatorrent/lib/testbench/EventGenerator.java
index 8dcf264..6b2ed8f 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/EventGenerator.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/EventGenerator.java
@@ -105,7 +105,7 @@ public class EventGenerator implements InputOperator
   HashMap<Integer, String> wtostr_index = new HashMap<Integer, String>();
   ArrayList<Integer> weights;
   int total_weight = 0;
-  private transient final Random random = new Random();
+  private final transient Random random = new Random();
   public static final int ROLLING_WINDOW_COUNT_DEFAULT = 1;
   @Min(1)
   private int rolling_window_count = ROLLING_WINDOW_COUNT_DEFAULT;
@@ -157,14 +157,12 @@ public class EventGenerator implements InputOperator
         }
         weights.add(Integer.parseInt(weightsArray[i]));
         total_weight += Integer.parseInt(weightsArray[i]);
-      }
-      else {
+      } else {
         total_weight += 1;
       }
       if ((valuesArray != null) && valuesArray.length != 0) {
         keys.put(s, new Double(Double.parseDouble(valuesArray[i])));
-      }
-      else {
+      } else {
         keys.put(s, 0.0);
       }
       wtostr_index.put(i, s);
@@ -197,8 +195,7 @@ public class EventGenerator implements InputOperator
       long average;
       if (rolling_window_count == 1) {
         average = (tcount * 1000) / elapsedTime;
-      }
-      else { // use tuple_numbers
+      } else { // use tuple_numbers
         int slots;
         if (count_denominator == rolling_window_count) {
           tuple_numbers[tuple_index] = tcount;
@@ -208,8 +205,7 @@ public class EventGenerator implements InputOperator
           if (tuple_index == rolling_window_count) {
             tuple_index = 0;
           }
-        }
-        else {
+        } else {
           tuple_numbers[count_denominator - 1] = tcount;
           time_numbers[count_denominator - 1] = elapsedTime;
           slots = count_denominator;
@@ -259,7 +255,7 @@ public class EventGenerator implements InputOperator
 
   /**
    * Comma separated strings which can be used as keys
-   * @param value
+   * @param keys
    */
   public void setKeysHelper(String keys)
   {
@@ -275,14 +271,13 @@ public class EventGenerator implements InputOperator
 
   /**
    * Comma separated values which are used as weight for the same indexed keys.
-   * @param value
+   * @param weight
    */
   public void setWeightsHelper(String weight)
   {
     if (weight.isEmpty()) {
       weightsArray = null;
-    }
-    else {
+    } else {
       weightsArray = weight.split(",");
     }
   }
@@ -300,8 +295,7 @@ public class EventGenerator implements InputOperator
   {
     if (value.isEmpty()) {
       valuesArray = null;
-    }
-    else {
+    } else {
       valuesArray = value.split(",");
     }
   }
@@ -341,8 +335,7 @@ public class EventGenerator implements InputOperator
           }
           j++;
         }
-      }
-      else {
+      } else {
         j++;
         j = j % keys.size();
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/EventIncrementer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/EventIncrementer.java b/library/src/main/java/com/datatorrent/lib/testbench/EventIncrementer.java
index 3668d5c..46b684d 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/EventIncrementer.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/EventIncrementer.java
@@ -18,14 +18,15 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.lib.util.KeyValPair;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.lib.util.KeyValPair;
+
 /**
  * Creates a random movement by taking in a seed stream and incrementing this data.
  * <p>
@@ -74,8 +75,7 @@ public class EventIncrementer extends BaseOperator
         if (keys.length != e.getValue().size()) { // bad seed
           return;
           // emit error tuple here
-        }
-        else {
+        } else {
           ArrayList<KeyValPair<String, Double>> alist = new ArrayList<KeyValPair<String, Double>>(keys.length);
           int j = 0;
           for (Integer s: e.getValue()) {
@@ -189,8 +189,7 @@ public class EventIncrementer extends BaseOperator
     double range = high - low;
     if (increment > range) { // bad data, do nothing
       ret = current;
-    }
-    else {
+    } else {
       sign = sign * -1.0;
       ret += sign * increment;
       if (ret < low) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/FilterClassifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/FilterClassifier.java b/library/src/main/java/com/datatorrent/lib/testbench/FilterClassifier.java
index a8ec00b..389cbb3 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/FilterClassifier.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/FilterClassifier.java
@@ -18,16 +18,16 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * Filters the tuples as per the filter (pass through percent) and emits them.
  * <p>
@@ -83,8 +83,7 @@ public class FilterClassifier<T> extends BaseOperator
         ArrayList<Integer> alist;
         if (inkeys != null) {
           alist = inkeys.get(inkey);
-        }
-        else {
+        } else {
           alist = noweight;
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/FilteredEventClassifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/FilteredEventClassifier.java b/library/src/main/java/com/datatorrent/lib/testbench/FilteredEventClassifier.java
index 27708dc..77d3970 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/FilteredEventClassifier.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/FilteredEventClassifier.java
@@ -18,16 +18,16 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * This operator takes in a stream of tuples
  * and randomly emits them based on the specified total_filter and pass_filter values.&nbsp;
@@ -110,8 +110,7 @@ public class FilteredEventClassifier<T> extends BaseOperator
           T keyval;
           if (hasvalues) {
             keyval = keys.get(key);
-          }
-          else { // pass on the value from incoming tuple
+          } else { // pass on the value from incoming tuple
             keyval = e.getValue();
           }
           otuple.put(key + "," + inkey, keyval);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/HashTestSink.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/HashTestSink.java b/library/src/main/java/com/datatorrent/lib/testbench/HashTestSink.java
index 63cbaf5..389ecb8 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/HashTestSink.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/HashTestSink.java
@@ -18,11 +18,12 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Sink;
-
 import java.util.HashMap;
+
 import org.apache.commons.lang.mutable.MutableInt;
 
+import com.datatorrent.api.Sink;
+
 /**
  * A sink implementation, which counts the number of times each tuples is collected and stores the results in a hash map.
  * <p></p>
@@ -54,8 +55,7 @@ public class HashTestSink<T> implements Sink<T>
   {
     int ret = -1;
     MutableInt val = map.get(key);
-    if (val != null)
-    {
+    if (val != null) {
       ret = val.intValue();
     }
     return ret;
@@ -64,13 +64,13 @@ public class HashTestSink<T> implements Sink<T>
   @Override
   public void put(T tuple)
   {
-      this.count++;
-      MutableInt val = map.get(tuple);
-      if (val == null) {
-        val = new MutableInt(0);
-        map.put(tuple, val);
-      }
-      val.increment();
+    this.count++;
+    MutableInt val = map.get(tuple);
+    if (val == null) {
+      val = new MutableInt(0);
+      map.put(tuple, val);
+    }
+    val.increment();
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/HttpStatusFilter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/HttpStatusFilter.java b/library/src/main/java/com/datatorrent/lib/testbench/HttpStatusFilter.java
index f24c6e4..08b190e 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/HttpStatusFilter.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/HttpStatusFilter.java
@@ -18,14 +18,13 @@
  */
 package com.datatorrent.lib.testbench;
 
-
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator consumes tuples which are key value pairs of strings.&nbsp;
@@ -39,62 +38,64 @@ import com.datatorrent.api.Context.OperatorContext;
  */
 public class HttpStatusFilter extends BaseOperator
 {
-	private String filterStatus;
-	private Map<String, Integer> collect;
-	public final transient DefaultInputPort<Map<String, String>> inport = new DefaultInputPort<Map<String, String>>() {
+  private String filterStatus;
+  private Map<String, Integer> collect;
+  public final transient DefaultInputPort<Map<String, String>> inport = new DefaultInputPort<Map<String, String>>()
+  {
     @Override
-    public void process(Map<String, String> s) {
-    	for(Map.Entry<String, String> entry : s.entrySet())
-    	{
-    		if (!entry.getValue().equals(filterStatus)) continue;
-	    	if (collect.containsKey(entry.getKey()))
-	    	{
-	    		Integer value = (Integer)collect.remove(entry.getKey());
-	    		collect.put(entry.getKey(), new Integer(value+1));
-	    	} else {
-	    		collect.put(entry.getKey(), new Integer(1));
-	    	}
-    	}
+    public void process(Map<String, String> s)
+    {
+      for (Map.Entry<String, String> entry : s.entrySet()) {
+        if (!entry.getValue().equals(filterStatus)) {
+          continue;
+        }
+        if (collect.containsKey(entry.getKey())) {
+          Integer value = (Integer)collect.remove(entry.getKey());
+          collect.put(entry.getKey(), new Integer(value + 1));
+        } else {
+          collect.put(entry.getKey(), new Integer(1));
+        }
+      }
     }
-	};
+  };
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-		collect  = new HashMap<String, Integer>();
-	}
+  @Override
+  public void setup(OperatorContext context)
+  {
+    collect  = new HashMap<String, Integer>();
+  }
 
-	@Override
-	public void teardown()
-	{
-	}
+  @Override
+  public void teardown()
+  {
+  }
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		collect  = new HashMap<String, Integer>();
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    collect  = new HashMap<String, Integer>();
+  }
 
-	// out port
-	public final transient DefaultOutputPort<Map<String, Integer>> outport = new DefaultOutputPort<Map<String, Integer>>();
+  // out port
+  public final transient DefaultOutputPort<Map<String, Integer>> outport = new DefaultOutputPort<Map<String, Integer>>();
 
-	@Override
-	public void endWindow()
-	{
-		outport.emit(collect);
-	}
+  @Override
+  public void endWindow()
+  {
+    outport.emit(collect);
+  }
 
-	public String getFilterStatus()
-	{
-		return filterStatus;
-	}
+  public String getFilterStatus()
+  {
+    return filterStatus;
+  }
 
-	/**
-	 * Only key with the following value is counted.
-	 * @param filterStatus
-	 */
-	public void setFilterStatus(String filterStatus)
-	{
-		this.filterStatus = filterStatus;
-	}
+  /**
+   * Only keys with the following value are counted.
+   * @param filterStatus
+   */
+  public void setFilterStatus(String filterStatus)
+  {
+    this.filterStatus = filterStatus;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/KeyValSum.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/KeyValSum.java b/library/src/main/java/com/datatorrent/lib/testbench/KeyValSum.java
index 0a26961..a6004dc 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/KeyValSum.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/KeyValSum.java
@@ -21,10 +21,10 @@ package com.datatorrent.lib.testbench;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator consumes maps whose keys are strings and values are integers.&nbsp;
@@ -37,51 +37,51 @@ import com.datatorrent.api.Context.OperatorContext;
  */
 public class KeyValSum extends BaseOperator
 {
-	private Map<String, Integer> collect;
+  private Map<String, Integer> collect;
 
   /**
    * This input port on which tuples are received.
    */
-	public final transient DefaultInputPort<Map<String, Integer>> inport = new DefaultInputPort<Map<String, Integer>>() {
+  public final transient DefaultInputPort<Map<String, Integer>> inport = new DefaultInputPort<Map<String, Integer>>()
+  {
     @Override
-    public void process(Map<String, Integer> s) {
-    	for(Map.Entry<String, Integer> entry : s.entrySet())
-    	{
-	    	if (collect.containsKey(entry.getKey()))
-	    	{
-	    		Integer value = (Integer)collect.remove(entry.getKey());
-	    		collect.put(entry.getKey(), value + entry.getValue());
-	    	} else {
-	    		collect.put(entry.getKey(), entry.getValue());
-	    	}
-    	}
+    public void process(Map<String, Integer> s)
+    {
+      for (Map.Entry<String, Integer> entry : s.entrySet()) {
+        if (collect.containsKey(entry.getKey())) {
+          Integer value = (Integer)collect.remove(entry.getKey());
+          collect.put(entry.getKey(), value + entry.getValue());
+        } else {
+          collect.put(entry.getKey(), entry.getValue());
+        }
+      }
     }
-	};
+  };
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
 
-	@Override
-	public void teardown()
-	{
-	}
+  @Override
+  public void teardown()
+  {
+  }
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		collect  = new HashMap<String, Integer>();
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    collect  = new HashMap<String, Integer>();
+  }
 
-	/**
+  /**
    * The output port on which sums are emitted.
    */
-	public final transient DefaultOutputPort<Map<String, Integer>> outport = new DefaultOutputPort<Map<String, Integer>>();
+  public final transient DefaultOutputPort<Map<String, Integer>> outport = new DefaultOutputPort<Map<String, Integer>>();
 
-	@Override
-	public void endWindow()
-	{
-		outport.emit(collect);
-	}
+  @Override
+  public void endWindow()
+  {
+    outport.emit(collect);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/RandomEventGenerator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/RandomEventGenerator.java b/library/src/main/java/com/datatorrent/lib/testbench/RandomEventGenerator.java
index f8c0b51..55b5c04 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/RandomEventGenerator.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/RandomEventGenerator.java
@@ -18,14 +18,15 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.Random;
+
 import javax.validation.constraints.Min;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * Generates synthetic load.&nbsp;Creates tuples using random numbers and keeps emitting them on the output port string_data and integer_data.
  * <p>
@@ -117,7 +118,8 @@ public class RandomEventGenerator extends BaseOperator implements InputOperator
     tuplesBlast = i;
   }
 
-  public void setTuplesBlastIntervalMillis(int tuplesBlastIntervalMillis) {
+  public void setTuplesBlastIntervalMillis(int tuplesBlastIntervalMillis)
+  {
     this.tuplesBlastIntervalMillis = tuplesBlastIntervalMillis;
   }
 
@@ -172,6 +174,7 @@ public class RandomEventGenerator extends BaseOperator implements InputOperator
       try {
         Thread.sleep(tuplesBlastIntervalMillis);
       } catch (InterruptedException e) {
+        //fixme
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/RandomWordGenerator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/RandomWordGenerator.java b/library/src/main/java/com/datatorrent/lib/testbench/RandomWordGenerator.java
index e88f06f..6d73cab 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/RandomWordGenerator.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/RandomWordGenerator.java
@@ -18,11 +18,13 @@
  */
 package com.datatorrent.lib.testbench;
 
+import java.util.Random;
+
+import javax.validation.constraints.Min;
+
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
-import java.util.Random;
-import javax.validation.constraints.Min;
 
 /**
  * This is an input operator which generates random tuples that are an array of bytes.
@@ -87,10 +89,7 @@ public class RandomWordGenerator implements InputOperator
   @Override
   public void emitTuples()
   {
-    for(;
-        tupleCounter < tuplesPerWindow;
-        tupleCounter++)
-    {
+    for (; tupleCounter < tuplesPerWindow; tupleCounter++) {
       byte[] bytes = new byte[tupleSize];
       random.nextBytes(bytes);
       output.emit(bytes);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/RedisSumOper.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/RedisSumOper.java b/library/src/main/java/com/datatorrent/lib/testbench/RedisSumOper.java
index 1e03b29..8c5a24e 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/RedisSumOper.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/RedisSumOper.java
@@ -22,10 +22,10 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator collects integer tuples, then emits their sum at the end of the window.
@@ -37,46 +37,50 @@ import com.datatorrent.api.Context.OperatorContext;
  */
 public class RedisSumOper extends BaseOperator
 {
-	private ArrayList<Integer> collect;
+  private ArrayList<Integer> collect;
 
   /**
    * This is the input port which receives integer tuples to be summed.
    */
-	public final transient DefaultInputPort<Integer> inport = new DefaultInputPort<Integer>() {
-	    @Override
-	    public void process(Integer s) {
-	      collect.add(s);
-	    }
-	};
+  public final transient DefaultInputPort<Integer> inport = new DefaultInputPort<Integer>()
+  {
+    @Override
+    public void process(Integer s)
+    {
+      collect.add(s);
+    }
+  };
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
 
-	@Override
-	public void teardown()
-	{
-	}
+  @Override
+  public void teardown()
+  {
+  }
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		collect  = new ArrayList<Integer>();
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    collect  = new ArrayList<Integer>();
+  }
 
-	/**
+  /**
    * This is the output port which emits the summed tuples.
    */
-	public final transient DefaultOutputPort<Map<Integer, Integer>> outport = new DefaultOutputPort<Map<Integer, Integer>>();
+  public final transient DefaultOutputPort<Map<Integer, Integer>> outport = new DefaultOutputPort<Map<Integer, Integer>>();
 
-	@Override
-	public void endWindow()
-	{
-		Integer sum = 0;
-		for(Integer entry : collect) sum += entry;
-		Map<Integer, Integer> tuple = new HashMap<Integer, Integer>();
-		tuple.put(1, sum);
-		outport.emit(tuple);
-	}
+  @Override
+  public void endWindow()
+  {
+    Integer sum = 0;
+    for (Integer entry : collect) {
+      sum += entry;
+    }
+    Map<Integer, Integer> tuple = new HashMap<Integer, Integer>();
+    tuple.put(1, sum);
+    outport.emit(tuple);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/SeedEventClassifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/SeedEventClassifier.java b/library/src/main/java/com/datatorrent/lib/testbench/SeedEventClassifier.java
index 20fefc8..efbdca8 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/SeedEventClassifier.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/SeedEventClassifier.java
@@ -18,14 +18,15 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.HashMap;
+
 import javax.validation.constraints.NotNull;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * This operator receives data on two input ports (data1, and data2).&nbsp;
  * Each incoming tuple is given a seed value
@@ -129,6 +130,7 @@ public class SeedEventClassifier<T> extends BaseOperator
       seed = s_start;
     }
   }
+
   /**
    * Data for classification values
    */

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/SeedEventGenerator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/SeedEventGenerator.java b/library/src/main/java/com/datatorrent/lib/testbench/SeedEventGenerator.java
index f1541a5..3d02051 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/SeedEventGenerator.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/SeedEventGenerator.java
@@ -18,16 +18,18 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.lib.util.KeyValPair;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Random;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.lib.util.KeyValPair;
+
 /**
  * Generates a one time seed load based on the range provided by the keys,
  * and adds new classification to incoming keys.&nbsp;
@@ -135,8 +137,7 @@ public class SeedEventGenerator extends BaseOperator implements InputOperator
       for (int i = lstart; i < lend; i++) {
         emitTuple(i);
       }
-    }
-    else {
+    } else {
       for (int i = lstart; i > lend; i--) {
         emitTuple(i);
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/SumTestSink.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/SumTestSink.java b/library/src/main/java/com/datatorrent/lib/testbench/SumTestSink.java
index c10c784..03f0c42 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/SumTestSink.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/SumTestSink.java
@@ -45,7 +45,7 @@ public class SumTestSink<T> implements Sink<T>
   public void put(T payload)
   {
     if (payload instanceof Number) {
-      val += ((Number) payload).doubleValue();
+      val += ((Number)payload).doubleValue();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/ThroughputCounter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/ThroughputCounter.java b/library/src/main/java/com/datatorrent/lib/testbench/ThroughputCounter.java
index c645619..72a0b1f 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/ThroughputCounter.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/ThroughputCounter.java
@@ -18,18 +18,19 @@
  */
 package com.datatorrent.lib.testbench;
 
-
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import java.util.HashMap;
 import java.util.Map;
+
 import javax.validation.constraints.Min;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * This operator expects incoming tuples to be of type HashMap&lt;String, Integer&gt;.&nbsp;
  * These values are throughput per window from upstream operators.&nbsp;
@@ -156,8 +157,7 @@ public class ThroughputCounter<K, V extends Number> extends BaseOperator
     long tuples_per_sec = (tuple_count * 1000) / elapsedTime; // * 1000 as elapsedTime is in millis
     if (rolling_window_count == 1) {
       average = tuples_per_sec;
-    }
-    else { // use tuple_numbers
+    } else { // use tuple_numbers
       long slots;
       if (count_denominator == rolling_window_count) {
         tuple_numbers[tuple_index] = tuple_count;
@@ -167,8 +167,7 @@ public class ThroughputCounter<K, V extends Number> extends BaseOperator
         if (tuple_index == rolling_window_count) {
           tuple_index = 0;
         }
-      }
-      else {
+      } else {
         tuple_numbers[count_denominator - 1] = tuple_count;
         time_numbers[count_denominator - 1] = elapsedTime;
         slots = count_denominator;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/testbench/TopOccurrence.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/testbench/TopOccurrence.java b/library/src/main/java/com/datatorrent/lib/testbench/TopOccurrence.java
index 24e742b..3f3da57 100644
--- a/library/src/main/java/com/datatorrent/lib/testbench/TopOccurrence.java
+++ b/library/src/main/java/com/datatorrent/lib/testbench/TopOccurrence.java
@@ -23,9 +23,9 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator consumes key value pairs of strings and integers.&nbsp;
@@ -38,110 +38,106 @@ import com.datatorrent.api.DefaultOutputPort;
  */
 public class TopOccurrence extends BaseOperator
 {
-	// n value
-	private int n = 5;
-	private int threshold = 5;
+  // n value
+  private int n = 5;
+  private int threshold = 5;
 
   /**
    *
    */
-	public final transient DefaultOutputPort<Map<Integer, String>> outport = new DefaultOutputPort<Map<Integer, String>>();
-	/**
+  public final transient DefaultOutputPort<Map<Integer, String>> outport = new DefaultOutputPort<>();
+  /**
    *
    */
-  public final transient DefaultOutputPort<Map<Integer, String>> gtThreshold = new DefaultOutputPort<Map<Integer, String>>();
+  public final transient DefaultOutputPort<Map<Integer, String>> gtThreshold = new DefaultOutputPort<>();
 
-	// input port
-	public final transient DefaultInputPort<Map<String, Integer>> inport =
-			 new DefaultInputPort<Map<String, Integer>>() {
+  // input port
+  public final transient DefaultInputPort<Map<String, Integer>> inport = new DefaultInputPort<Map<String, Integer>>()
+  {
     @Override
     public void process(Map<String, Integer> tuple)
     {
       int numOuts = 0;
-      if (tuple.size() < n)
-      {
-    	for (Map.Entry<String, Integer> entry : tuple.entrySet())
-      	{
-      		Map<Integer, String> out = new HashMap<Integer, String>();
-      		String value = new StringBuilder(entry.getKey()).append("##").append(entry.getValue()).toString();
-      		out.put(numOuts++, value);
-      		outport.emit(out);
-      	}
-      	while(numOuts < n)
-      	{
-      		Map<Integer, String> out = new HashMap<Integer, String>();
-      		out.put(numOuts++, "");
-      		outport.emit(out);
-      	}
+      if (tuple.size() < n) {
+        for (Map.Entry<String, Integer> entry : tuple.entrySet()) {
+          Map<Integer, String> out = new HashMap<Integer, String>();
+          String value = new StringBuilder(entry.getKey()).append("##").append(entry.getValue()).toString();
+          out.put(numOuts++, value);
+          outport.emit(out);
+        }
+        while (numOuts < n) {
+          Map<Integer, String> out = new HashMap<Integer, String>();
+          out.put(numOuts++, "");
+          outport.emit(out);
+        }
       } else {
 
-		ArrayList<Integer> values = new ArrayList<Integer>();
-		for (Map.Entry<String, Integer> entry : tuple.entrySet())
-		{
-		  values.add(entry.getValue());
-		}
-		Collections.sort(values);
-		for (int i=values.size()-1; i >= 0; i--)
-		{
-		  for (Map.Entry<String, Integer> entry : tuple.entrySet())
-	      {
-			if (entry.getValue() == values.get(i))
-			{
-			  Map<Integer, String> out = new HashMap<Integer, String>();
-			  String value = new StringBuilder(entry.getKey()).append("##").append(entry.getValue()).toString();
-			  out.put(numOuts++, value);
-			  outport.emit(out);
-			}
-			if (numOuts >= n) break;
-	      }
-		  if (numOuts >= n) break;
-		}
+        ArrayList<Integer> values = new ArrayList<Integer>();
+        for (Map.Entry<String, Integer> entry : tuple.entrySet()) {
+          values.add(entry.getValue());
+        }
+        Collections.sort(values);
+
+        for (int i = values.size() - 1; i >= 0; i--) {
+          for (Map.Entry<String, Integer> entry : tuple.entrySet()) {
+            if (entry.getValue() == values.get(i)) {
+              Map<Integer, String> out = new HashMap<Integer, String>();
+              String value = new StringBuilder(entry.getKey()).append("##").append(entry.getValue()).toString();
+              out.put(numOuts++, value);
+              outport.emit(out);
+            }
+            if (numOuts >= n) {
+              break;
+            }
+          }
+          if (numOuts >= n) {
+            break;
+          }
+        }
       }
 
       // output greater than threshold
       numOuts = 1;
-      for (Map.Entry<String, Integer> entry : tuple.entrySet())
-      {
-      	if (entry.getValue() > threshold)
-      	{
-      		Map<Integer, String> out = new HashMap<Integer, String>();
-      	    String value = new StringBuilder(entry.getKey()).append("##").append(entry.getValue()).toString();
-		    out.put(numOuts++, value);
-		    gtThreshold.emit(out);
-		}
+      for (Map.Entry<String, Integer> entry : tuple.entrySet()) {
+        if (entry.getValue() > threshold) {
+          Map<Integer, String> out = new HashMap<Integer, String>();
+          String value = new StringBuilder(entry.getKey()).append("##").append(entry.getValue()).toString();
+          out.put(numOuts++, value);
+          gtThreshold.emit(out);
+        }
       }
       Map<Integer, String> out = new HashMap<Integer, String>();
-	  out.put(0,  new Integer(numOuts).toString());
-	  gtThreshold.emit(out);
-     }
-	};
+      out.put(0, new Integer(numOuts).toString());
+      gtThreshold.emit(out);
+    }
+  };
 
-	public int getN()
-	{
-		return n;
-	}
+  public int getN()
+  {
+    return n;
+  }
 
-	/**
-	 * Output n top values
-	 * @param n
-	*/
-	public void setN(int n)
-	{
-		this.n = n;
-	}
+  /**
+   * Output n top values
+   * @param n
+  */
+  public void setN(int n)
+  {
+    this.n = n;
+  }
 
-	public int getThreshold()
-	{
-		return threshold;
-	}
+  public int getThreshold()
+  {
+    return threshold;
+  }
 
-	/**
-	 * Emit the tuples only if it's value is greater than the threshold.
-	 * @param threshold
-	*/
-	public void setThreshold(int threshold)
-	{
-		this.threshold = threshold;
-	}
+  /**
+   * Emit the tuples only if its value is greater than the threshold.
+   * @param threshold
+  */
+  public void setThreshold(int threshold)
+  {
+    this.threshold = threshold;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/transform/TransformOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/transform/TransformOperator.java b/library/src/main/java/com/datatorrent/lib/transform/TransformOperator.java
index a40bb97..309560b 100644
--- a/library/src/main/java/com/datatorrent/lib/transform/TransformOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/transform/TransformOperator.java
@@ -89,7 +89,8 @@ public class TransformOperator extends BaseOperator implements Operator.Activati
   @OutputPortFieldAnnotation(schemaRequired = true)
   public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<Object>()
   {
-    @Override public void setup(Context.PortContext context)
+    @Override
+    public void setup(Context.PortContext context)
     {
       outputClass = context.getValue(Context.PortContext.TUPLE_CLASS);
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKey.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKey.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKey.java
index a9e6d29..5a4d721 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKey.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKey.java
@@ -53,25 +53,28 @@ public abstract class AbstractBaseFrequentKey<K> extends BaseKeyOperator<K>
     }
     count.increment();
   }
+
   protected HashMap<K, MutableInt> keycount = new HashMap<K, MutableInt>();
 
   /**
    * override emitTuple to decide the port to emit to
    * @param tuple
    */
-  abstract public void emitTuple(HashMap<K,Integer> tuple);
+  public abstract void emitTuple(HashMap<K,Integer> tuple);
+
   /**
    * Override emitList to specify the emit schema
    * @param tlist
    */
-  abstract public void emitList(ArrayList<HashMap<K, Integer>> tlist);
+  public abstract void emitList(ArrayList<HashMap<K, Integer>> tlist);
+
   /**
    * Override compareCount to decide most vs least
    * @param val1
    * @param val2
    * @return result of compareCount to be done by subclass
    */
-  abstract public boolean compareCount(int val1, int val2);
+  public abstract boolean compareCount(int val1, int val2);
 
   /**
    * Emits the result.
@@ -88,14 +91,12 @@ public abstract class AbstractBaseFrequentKey<K> extends BaseKeyOperator<K>
         key = e.getKey();
         kval = e.getValue().intValue();
         map.put(key, null);
-      }
-      else if (compareCount(e.getValue().intValue(), kval)) {
+      } else if (compareCount(e.getValue().intValue(), kval)) {
         key = e.getKey();
         kval = e.getValue().intValue();
         map.clear();
         map.put(key, null);
-      }
-      else if (e.getValue().intValue() == kval) {
+      } else if (e.getValue().intValue() == kval) {
         map.put(e.getKey(), null);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKeyValueMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKeyValueMap.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKeyValueMap.java
index 0b2c360..f96b792 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKeyValueMap.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseFrequentKeyValueMap.java
@@ -18,12 +18,13 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.DefaultInputPort;
-
 import java.util.HashMap;
 import java.util.Map;
+
 import org.apache.commons.lang.mutable.MutableInt;
 
+import com.datatorrent.api.DefaultInputPort;
+
 /**
  * This is the base implementation of an operator, which takes key value pairs as inputs.&nbsp;
  * It counts the number of times each key value pair occurs.&nbsp;
@@ -104,14 +105,12 @@ public abstract class AbstractBaseFrequentKeyValueMap<K, V> extends BaseKeyValue
           val = v.getKey();
           kval = v.getValue().intValue();
           vmap.put(val, null);
-        }
-        else if (compareValue(v.getValue().intValue(), kval)) {
+        } else if (compareValue(v.getValue().intValue(), kval)) {
           val = v.getKey();
           kval = v.getValue().intValue();
           vmap.clear();
           vmap.put(val, null);
-        }
-        else if (v.getValue().intValue() == kval) {
+        } else if (v.getValue().intValue() == kval) {
           vmap.put(v.getKey(), null);
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseMatchOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseMatchOperator.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseMatchOperator.java
index b7f4d97..f7d78db 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseMatchOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseMatchOperator.java
@@ -18,11 +18,11 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.lib.util.BaseKeyValueOperator;
 import javax.validation.constraints.NotNull;
 import javax.validation.constraints.Pattern;
 
+import com.datatorrent.api.Context.OperatorContext;
+
 /**
  * This is the base implementation of operators which perform comparisons.&nbsp;
  * A concrete operator should be created from this skeleton implementation.
@@ -60,7 +60,8 @@ public abstract class AbstractBaseMatchOperator<K,V extends Comparable> extends
   public enum supported_type
   {
     LTE, LT, EQ, NEQ, GT, GTE
-  };
+  }
+
   supported_type type = supported_type.EQ;
 
   /**
@@ -166,23 +167,17 @@ public abstract class AbstractBaseMatchOperator<K,V extends Comparable> extends
   {
     if (cmp.equals("lt")) {
       setTypeLT();
-    }
-    else if (cmp.equals("lte")) {
+    } else if (cmp.equals("lte")) {
       setTypeLTE();
-    }
-    else if (cmp.equals("eq")) {
+    } else if (cmp.equals("eq")) {
       setTypeEQ();
-    }
-    else if (cmp.equals("ne")) {
+    } else if (cmp.equals("ne")) {
       setTypeEQ();
-    }
-    else if (cmp.equals("gt")) {
+    } else if (cmp.equals("gt")) {
       setTypeGT();
-    }
-    else if (cmp.equals("gte")) {
+    } else if (cmp.equals("gte")) {
       setTypeGTE();
-    }
-    else {
+    } else {
       setTypeEQ();
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNNonUniqueOperatorMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNNonUniqueOperatorMap.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNNonUniqueOperatorMap.java
index ef4c9e4..47282b1 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNNonUniqueOperatorMap.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNNonUniqueOperatorMap.java
@@ -52,13 +52,13 @@ public abstract class AbstractBaseNNonUniqueOperatorMap<K, V> extends AbstractBa
    * Override to decide the direction (ascending vs descending)
    * @return true if ascending, to be done by sub-class
    */
-  abstract public boolean isAscending();
+  public abstract boolean isAscending();
 
   /**
    * Override to decide which port to emit to and its schema
    * @param tuple
    */
-  abstract public void emit(HashMap<K, ArrayList<V>> tuple);
+  public abstract void emit(HashMap<K, ArrayList<V>> tuple);
 
   /**
    *
@@ -75,8 +75,7 @@ public abstract class AbstractBaseNNonUniqueOperatorMap<K, V> extends AbstractBa
         pqueue = new TopNSort<V>(5, n, isAscending());
         kmap.put(cloneKey(e.getKey()), pqueue);
         pqueue.offer(cloneValue(e.getValue()));
-      }
-      else {
+      } else {
         pqueue.offer(e.getValue());
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNOperatorMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNOperatorMap.java b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNOperatorMap.java
index 236a05c..4f6ab85 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNOperatorMap.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractBaseNOperatorMap.java
@@ -52,7 +52,7 @@ import com.datatorrent.api.StreamCodec;
  * @tags rank, key value
  * @since 0.3.2
  */
-abstract public class AbstractBaseNOperatorMap<K,V> extends BaseKeyValueOperator<K,V>
+public abstract class AbstractBaseNOperatorMap<K,V> extends BaseKeyValueOperator<K,V>
 {
   /**
    * This is the input port that receives key value pairs.
@@ -86,7 +86,7 @@ abstract public class AbstractBaseNOperatorMap<K,V> extends BaseKeyValueOperator
    *
    * @param tuple
    */
-  abstract public void processTuple(Map<K,V> tuple);
+  public abstract void processTuple(Map<K,V> tuple);
 
   /**
    * Sets value of N (depth)



[18/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/Serde.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/Serde.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/Serde.java
index fe75928..d4fa73b 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/Serde.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/Serde.java
@@ -31,6 +31,7 @@ import org.apache.commons.lang3.mutable.MutableInt;
  */
 public interface Serde
 {
-  public byte[] serializeObject(Object object);
-  public Object deserializeObject(byte[] object, MutableInt offset);
+  byte[] serializeObject(Object object);
+
+  Object deserializeObject(byte[] object, MutableInt offset);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptor.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptor.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptor.java
index bfd1c46..521cea2 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptor.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeFieldsDescriptor.java
@@ -20,10 +20,10 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.Map;
 
-import com.google.common.collect.Maps;
-
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;
 import com.datatorrent.lib.appdata.schemas.Type;
 
@@ -44,9 +44,9 @@ public class SerdeFieldsDescriptor implements Serde
   @Override
   public synchronized byte[] serializeObject(Object object)
   {
-    FieldsDescriptor fd = (FieldsDescriptor) object;
+    FieldsDescriptor fd = (FieldsDescriptor)object;
 
-    for(Map.Entry<String, Type> entry: fd.getFieldToType().entrySet()) {
+    for (Map.Entry<String, Type> entry : fd.getFieldToType().entrySet()) {
       bal.add(GPOUtils.serializeInt(entry.getValue().ordinal()));
       bal.add(GPOUtils.serializeString(entry.getKey()));
     }
@@ -68,7 +68,7 @@ public class SerdeFieldsDescriptor implements Serde
     int length = GPOUtils.deserializeInt(object, offset);
     int startIndex = offset.intValue();
 
-    while(startIndex + length > offset.intValue()) {
+    while (startIndex + length > offset.intValue()) {
       Type type = Type.values()[GPOUtils.deserializeInt(object, offset)];
       String value = GPOUtils.deserializeString(object, offset);
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutable.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutable.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutable.java
index 2a8aed0..949902b 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutable.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListGPOMutable.java
@@ -21,10 +21,10 @@ package com.datatorrent.lib.appdata.gpo;
 import java.util.ArrayList;
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Lists;
+
 import com.datatorrent.lib.appdata.schemas.FieldsDescriptor;
 
 /**
@@ -45,9 +45,9 @@ public class SerdeListGPOMutable implements Serde
   public synchronized byte[] serializeObject(Object object)
   {
     @SuppressWarnings("unchecked")
-    List<GPOMutable> mutables = (List<GPOMutable>) object;
+    List<GPOMutable> mutables = (List<GPOMutable>)object;
 
-    if(mutables.isEmpty()) {
+    if (mutables.isEmpty()) {
       return GPOUtils.serializeInt(0);
     }
 
@@ -55,9 +55,7 @@ public class SerdeListGPOMutable implements Serde
 
     bytes.add(SerdeFieldsDescriptor.INSTANCE.serializeObject(fd));
 
-    for(int index = 0;
-        index < mutables.size();
-        index++) {
+    for (int index = 0; index < mutables.size(); index++) {
       bytes.add(GPOUtils.serialize(mutables.get(index), bytes));
     }
 
@@ -76,15 +74,14 @@ public class SerdeListGPOMutable implements Serde
     int length = GPOUtils.deserializeInt(object, offset);
     int startIndex = offset.intValue();
 
-    if(length == 0) {
+    if (length == 0) {
       return new ArrayList<GPOMutable>();
     }
 
-    FieldsDescriptor fd =
-    (FieldsDescriptor) SerdeFieldsDescriptor.INSTANCE.deserializeObject(object, offset);
+    FieldsDescriptor fd = (FieldsDescriptor)SerdeFieldsDescriptor.INSTANCE.deserializeObject(object, offset);
 
     List<GPOMutable> mutables = Lists.newArrayList();
-    while(startIndex + length > offset.intValue()) {
+    while (startIndex + length > offset.intValue()) {
       GPOMutable value = GPOUtils.deserialize(fd, object, offset);
       mutables.add(value);
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitive.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitive.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitive.java
index 21dfd66..2f236c7 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitive.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListPrimitive.java
@@ -20,10 +20,10 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Lists;
+
 import com.datatorrent.lib.appdata.schemas.Type;
 
 /**
@@ -44,15 +44,13 @@ public class SerdeListPrimitive implements Serde
   public synchronized byte[] serializeObject(Object object)
   {
     @SuppressWarnings("unchecked")
-    List<Object> primitives = (List<Object>) object;
+    List<Object> primitives = (List<Object>)object;
 
-    for(int index = 0;
-        index < primitives.size();
-        index++) {
+    for (int index = 0; index < primitives.size(); index++) {
       Object primitive = primitives.get(index);
       Type type = Type.CLASS_TO_TYPE.get(primitive.getClass());
 
-      if(type == null || type == Type.OBJECT) {
+      if (type == null || type == Type.OBJECT) {
         throw new IllegalArgumentException("Cannot serialize objects of class " + primitive.getClass());
       }
 
@@ -78,7 +76,7 @@ public class SerdeListPrimitive implements Serde
 
     List<Object> listPrimitives = Lists.newArrayList();
 
-    while(startIndex + length > offset.intValue()) {
+    while (startIndex + length > offset.intValue()) {
       int typeOrdinal = GPOUtils.deserializeInt(object, offset);
       GPOType gpoType = GPOType.GPO_TYPE_ARRAY[typeOrdinal];
       Object primitive = gpoType.deserialize(object, offset);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListString.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListString.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListString.java
index 3017696..d2dffe1 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListString.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeListString.java
@@ -20,13 +20,13 @@ package com.datatorrent.lib.appdata.gpo;
 
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.commons.lang3.mutable.MutableInt;
 
+import com.google.common.collect.Lists;
+
 /**
  * @since 3.1.0
  */
@@ -47,7 +47,7 @@ public class SerdeListString implements Serde
     int startIndex = offset.intValue();
 
     List<String> strings = Lists.newArrayList();
-    while(startIndex + length > offset.intValue()) {
+    while (startIndex + length > offset.intValue()) {
       String value = GPOUtils.deserializeString(object, offset);
       strings.add(value);
     }
@@ -59,11 +59,9 @@ public class SerdeListString implements Serde
   public synchronized byte[] serializeObject(Object object)
   {
     @SuppressWarnings("unchecked")
-    List<String> strings = (List<String>) object;
+    List<String> strings = (List<String>)object;
 
-    for(int index = 0;
-        index < strings.size();
-        index++) {
+    for (int index = 0; index < strings.size(); index++) {
       String string = strings.get(index);
       byte[] stringBytes = string.getBytes();
       byte[] lengthBytes = GPOUtils.serializeInt(stringBytes.length);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/AbstractWindowEndQueueManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/AbstractWindowEndQueueManager.java b/library/src/main/java/com/datatorrent/lib/appdata/query/AbstractWindowEndQueueManager.java
index cb854b6..b13e2d7 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/AbstractWindowEndQueueManager.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/AbstractWindowEndQueueManager.java
@@ -21,16 +21,15 @@ package com.datatorrent.lib.appdata.query;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.appdata.QueueUtils.ConditionBarrier;
-import com.datatorrent.lib.appdata.query.QueueList.QueueListNode;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
 
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.lib.appdata.QueueUtils.ConditionBarrier;
+import com.datatorrent.lib.appdata.query.QueueList.QueueListNode;
 
 /**
  * This is an abstract implementation of a QueueManager which works in the following way.
@@ -52,8 +51,7 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
   /**
    * The {@link QueueList} which is backing this {@link QueueManager}.
    */
-  protected QueueList<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> queryQueue =
-  new QueueList<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>>();
+  protected QueueList<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> queryQueue = new QueueList<>();
   /**
    * A pointer to the current node in the {@link QueueList}.
    */
@@ -91,10 +89,9 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
    */
   private boolean enqueueHelper(QUERY_TYPE query, META_QUERY metaQuery, QUEUE_CONTEXT context)
   {
-    QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueable =
-    new QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>(query, metaQuery, context);
+    QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueable = new QueryBundle<>(query, metaQuery, context);
 
-    QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> node = new QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>>(queryQueueable);
+    QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> node = new QueueListNode<>(queryQueueable);
 
     synchronized (numLeft) {
       if (addingFilter(queryQueueable)) {
@@ -131,62 +128,57 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
 
     boolean first = true;
 
-    if(block) {
+    if (block) {
       acquire();
     }
 
-    if(currentNode == null) {
+    if (currentNode == null) {
       currentNode = queryQueue.getHead();
       readCurrent = false;
 
-      if(currentNode == null) {
+      if (currentNode == null) {
         return null;
       }
-    }
-    else {
-      if(readCurrent) {
+    } else {
+      if (readCurrent) {
         QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> tempNode = currentNode.getNext();
 
-        if(tempNode != null) {
+        if (tempNode != null) {
           currentNode = tempNode;
           readCurrent = false;
-        }
-        else {
+        } else {
           return null;
         }
       }
     }
 
-    while(true)
-    {
-      if(block && !first) {
+    while (true) {
+      if (block && !first) {
         acquire();
 
         //TODO dedup this code
-        if(currentNode == null) {
+        if (currentNode == null) {
           currentNode = queryQueue.getHead();
           readCurrent = false;
 
-          if(currentNode == null) {
+          if (currentNode == null) {
             return null;
           }
-        }
-        else {
-          if(readCurrent) {
+        } else {
+          if (readCurrent) {
             QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> tempNode = currentNode.getNext();
 
-            if(tempNode != null) {
+            if (tempNode != null) {
               currentNode = tempNode;
               readCurrent = false;
-            }
-            else {
+            } else {
               return null;
             }
           }
         }
       }
 
-      synchronized(numLeft) {
+      synchronized (numLeft) {
         numLeft.getAndDecrement();
         QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueable = currentNode.getPayload();
 
@@ -194,36 +186,31 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
 
         QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> nextNode = currentNode.getNext();
 
-        if(removeBundle(queryQueueable)) {
+        if (removeBundle(queryQueueable)) {
           queryQueue.removeNode(currentNode);
           removedNode(currentNode);
 
-          if(block) {
-            if(nextNode == null) {
+          if (block) {
+            if (nextNode == null) {
               readCurrent = true;
-            }
-            else {
+            } else {
               currentNode = nextNode;
               readCurrent = false;
             }
-          }
-          else {
-            if(nextNode == null) {
+          } else {
+            if (nextNode == null) {
               readCurrent = true;
               break;
-            }
-            else {
+            } else {
               currentNode = nextNode;
             }
           }
-        }
-        else {
+        } else {
           qq = currentNode.getPayload();
 
-          if(nextNode == null) {
+          if (nextNode == null) {
             readCurrent = true;
-          }
-          else {
+          } else {
             currentNode = nextNode;
             readCurrent = false;
           }
@@ -234,11 +221,10 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
     }
 
     //Handle the case where non blocking dequeue is happening, semaphore needs to be synched up.
-    if(semaphore.availablePermits() > numLeft.get()) {
+    if (semaphore.availablePermits() > numLeft.get()) {
       try {
         semaphore.acquire(semaphore.availablePermits() - numLeft.get());
-      }
-      catch(InterruptedException ex) {
+      } catch (InterruptedException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -256,8 +242,7 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
   {
     try {
       semaphore.acquire();
-    }
-    catch(InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -296,7 +281,7 @@ public abstract class AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, QUEU
   @Override
   public void beginWindow(long windowId)
   {
-    synchronized(numLeft) {
+    synchronized (numLeft) {
       currentNode = queryQueue.getHead();
       readCurrent = false;
       numLeft.set(queryQueue.getSize());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/AppDataWindowEndQueueManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/AppDataWindowEndQueueManager.java b/library/src/main/java/com/datatorrent/lib/appdata/query/AppDataWindowEndQueueManager.java
index 48307d7..dac3fd3 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/AppDataWindowEndQueueManager.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/AppDataWindowEndQueueManager.java
@@ -20,10 +20,10 @@ package com.datatorrent.lib.appdata.query;
 
 import java.util.Map;
 
-import com.google.common.collect.Maps;
-
 import org.apache.commons.lang3.mutable.MutableLong;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.query.QueueList.QueueListNode;
 import com.datatorrent.lib.appdata.schemas.Query;
 
@@ -48,14 +48,13 @@ public class AppDataWindowEndQueueManager<QUERY extends Query, META_QUERY> exten
   @Override
   public boolean enqueue(QUERY query, META_QUERY metaQuery, MutableLong context)
   {
-    if(context != null) {
+    if (context != null) {
       query.setCountdown(context.getValue());
     }
 
-    if(query.isOneTime()) {
+    if (query.isOneTime()) {
       return super.enqueue(query, metaQuery, new MutableLong(1L));
-    }
-    else {
+    } else {
       return super.enqueue(query, metaQuery, new MutableLong(query.getCountdown()));
     }
   }
@@ -76,9 +75,9 @@ public class AppDataWindowEndQueueManager<QUERY extends Query, META_QUERY> exten
   public boolean addingFilter(QueryBundle<QUERY, META_QUERY, MutableLong> queryBundle)
   {
     QueueListNode<QueryBundle<QUERY, META_QUERY, MutableLong>> queryNode =
-    queryIDToNode.get(queryBundle.getQuery().getId());
+        queryIDToNode.get(queryBundle.getQuery().getId());
 
-    if(queryNode == null) {
+    if (queryNode == null) {
       return true;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/QueryBundle.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryBundle.java b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryBundle.java
index f07a100..2f8043c 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryBundle.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryBundle.java
@@ -36,9 +36,7 @@ public class QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>
   {
   }
 
-  public QueryBundle(QUERY_TYPE query,
-                     META_QUERY metaQuery,
-                     QUEUE_CONTEXT queueContext)
+  public QueryBundle(QUERY_TYPE query, META_QUERY metaQuery, QUEUE_CONTEXT queueContext)
   {
     this.query = query;
     this.metaQuery = metaQuery;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/QueryExecutor.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryExecutor.java b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryExecutor.java
index fd77837..41b03d5 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryExecutor.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryExecutor.java
@@ -36,7 +36,5 @@ public interface QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>
   * @param queueContext Additional information required to queue the query properly.
    * @return The result of the query if it's available. False otherwise.
    */
-  public RESULT executeQuery(QUERY_TYPE query,
-                             META_QUERY metaQuery,
-                             QUEUE_CONTEXT queueContext);
+  public RESULT executeQuery(QUERY_TYPE query, META_QUERY metaQuery, QUEUE_CONTEXT queueContext);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronous.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronous.java b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronous.java
index 2546aef..f20db7d 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronous.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerAsynchronous.java
@@ -26,21 +26,19 @@ import java.util.concurrent.Semaphore;
 
 import javax.validation.constraints.NotNull;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.appdata.query.serde.MessageSerializerFactory;
-import com.datatorrent.lib.appdata.schemas.Result;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
 
 import com.datatorrent.api.Component;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.Operator.IdleTimeHandler;
-
 import com.datatorrent.common.util.NameableThreadFactory;
+import com.datatorrent.lib.appdata.query.serde.MessageSerializerFactory;
+import com.datatorrent.lib.appdata.schemas.Result;
 
 /**
  * @since 3.1.0
@@ -51,7 +49,7 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
   private DefaultOutputPort<String> resultPort = null;
 
   //TODO I believe this semaphore is no longer necessary and can just be straight up deleted.
-  private transient final Semaphore inWindowSemaphore = new Semaphore(0);
+  private final transient Semaphore inWindowSemaphore = new Semaphore(0);
   private final ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<String>();
   private QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queueManager;
   private QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor;
@@ -62,10 +60,10 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
   private transient Thread mainThread;
 
   public QueryManagerAsynchronous(DefaultOutputPort<String> resultPort,
-                                  QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queueManager,
-                                  QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor,
-                                  MessageSerializerFactory messageSerializerFactory,
-                                  Thread mainThread)
+      QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queueManager,
+      QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor,
+      MessageSerializerFactory messageSerializerFactory,
+      Thread mainThread)
   {
     setResultPort(resultPort);
     setQueueManager(queueManager);
@@ -139,8 +137,7 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
 
     try {
       inWindowSemaphore.acquire();
-    }
-    catch(InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -149,7 +146,7 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
   private boolean isProcessingDone()
   {
     if (queueManager instanceof AbstractWindowEndQueueManager) {
-      return ((AbstractWindowEndQueueManager) queueManager).isEmptyAndBlocked();
+      return ((AbstractWindowEndQueueManager)queueManager).isEmptyAndBlocked();
     }
 
     return queueManager.getNumLeft() == 0;
@@ -157,7 +154,7 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
 
   private void emptyQueue()
   {
-    while(!queue.isEmpty()) {
+    while (!queue.isEmpty()) {
       resultPort.emit(queue.poll());
     }
   }
@@ -194,8 +191,7 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
     {
       try {
         loop();
-      }
-      catch(Exception ex) {
+      } catch (Exception ex) {
         LOG.error("Exception thrown while processing:", ex);
         mainThread.interrupt();
 
@@ -208,7 +204,7 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
     private void loop()
     {
       //Do this forever
-      while(true) {
+      while (true) {
         //Grab something from the queue as soon as it's available.
         QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryBundle = queueManager.dequeueBlock();
 
@@ -219,9 +215,8 @@ public class QueryManagerAsynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RES
         }
 
        //We are guaranteed to be in the operator's window now.
-        Result result = queryExecutor.executeQuery(queryBundle.getQuery(),
-                                                   queryBundle.getMetaQuery(),
-                                                   queryBundle.getQueueContext());
+        Result result = queryExecutor.executeQuery(queryBundle.getQuery(), queryBundle.getMetaQuery(),
+            queryBundle.getQueueContext());
         if (result != null) {
           String serializedMessage = messageSerializerFactory.serialize(result);
           queue.add(serializedMessage);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronous.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronous.java b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronous.java
index abc632e..a349a52 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronous.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/QueryManagerSynchronous.java
@@ -59,7 +59,7 @@ public class QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESU
   }
 
   private QueryManagerSynchronous(QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryComputer,
-                                  QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueManager)
+      QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueManager)
   {
     setQueryExecutor(queryComputer);
     setQueryQueueManager(queryQueueManager);
@@ -107,15 +107,14 @@ public class QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESU
     do {
       QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryBundle = queryQueueManager.dequeue();
 
-      if(queryBundle == null) {
+      if (queryBundle == null) {
         return null;
       }
 
-      result = queryExecutor.executeQuery(queryBundle.getQuery(),
-                                          queryBundle.getMetaQuery(),
-                                          queryBundle.getQueueContext());
+      result = queryExecutor.executeQuery(queryBundle.getQuery(), queryBundle.getMetaQuery(),
+          queryBundle.getQueueContext());
     }
-    while(result == null);
+    while (result == null);
 
     return result;
   }
@@ -169,9 +168,8 @@ public class QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESU
    * @param queryExecutor The {@link QueryExecutor} the queryExecutor used to execute queries.
    * @return A new instance of QueryManagerSynchronous.
    */
-  public static <QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>
-  QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>
-  newInstance(QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor)
+  public static <QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> QueryManagerSynchronous<QUERY_TYPE, META_QUERY,
+      QUEUE_CONTEXT, RESULT> newInstance(QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor)
   {
     return new QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>(queryExecutor);
   }
@@ -186,10 +184,9 @@ public class QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESU
    * @param queryQueueManager The {@link QueueManager} used to queue queries.
    * @return A new instance of QueryManagerSynchronous.
    */
-  public static <QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>
-  QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>
-  newInstance(QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor,
-              QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueManager)
+  public static <QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> QueryManagerSynchronous<QUERY_TYPE, META_QUERY,
+      QUEUE_CONTEXT, RESULT> newInstance(QueryExecutor<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT> queryExecutor,
+      QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> queryQueueManager)
   {
     return new QueryManagerSynchronous<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT, RESULT>(queryExecutor,
       queryQueueManager);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/QueueList.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/QueueList.java b/library/src/main/java/com/datatorrent/lib/appdata/query/QueueList.java
index 2bf81f8..386d303 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/QueueList.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/QueueList.java
@@ -60,11 +60,11 @@ public class QueueList<T>
    */
   public void enqueue(QueueListNode<T> node)
   {
-    synchronized(lock) {
+    synchronized (lock) {
       Preconditions.checkNotNull(node);
       size++;
 
-      if(head == null) {
+      if (head == null) {
         head = node;
         tail = node;
         node.setNext(null);
@@ -85,7 +85,7 @@ public class QueueList<T>
    */
   public QueueListNode<T> getHead()
   {
-    synchronized(lock) {
+    synchronized (lock) {
       return head;
     }
   }
@@ -96,27 +96,24 @@ public class QueueList<T>
    */
   public void removeNode(QueueListNode<T> node)
   {
-    synchronized(lock) {
+    synchronized (lock) {
       size--;
 
-    //Handle the case when adding to the end of list and
+      //Handle the case when adding to the end of list and
       //removing a node in parallel
-      if(head == node) {
-        if(tail == node) {
+      if (head == node) {
+        if (tail == node) {
           head = null;
           tail = null;
-        }
-        else {
+        } else {
           head = node.getNext();
           head.setPrev(null);
         }
-      }
-      else {
-        if(tail == node) {
+      } else {
+        if (tail == node) {
           tail = node.getPrev();
           tail.setNext(null);
-        }
-        else {
+        } else {
           node.getPrev().setNext(node.getNext());
           node.getNext().setPrev(node.getPrev());
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/QueueManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/QueueManager.java b/library/src/main/java/com/datatorrent/lib/appdata/query/QueueManager.java
index b190e71..3cdfee3 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/QueueManager.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/QueueManager.java
@@ -33,6 +33,7 @@ import com.datatorrent.api.Context.OperatorContext;
  */
 public interface QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> extends Component<OperatorContext>
 {
+
   /**
    * This method enqueues an AppData query.
    * @param query The query to queue.
@@ -42,6 +43,7 @@ public interface QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> extends Com
    * @return True if the query was successfully queued. False otherwise.
    */
   public boolean enqueue(QUERY_TYPE query, META_QUERY metaQuery, QUEUE_CONTEXT queueContext);
+
   /**
    * <p>
    * This method dequeues a query, and returns a {@link QueryBundle} which includes the query,
@@ -53,12 +55,14 @@ public interface QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> extends Com
    * @return The query bundle for a query.
    */
   public QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> dequeue();
+
   /**
    * This should be called in beginWindow of an operator so that the {@link QueueManager} can correctly update
    * its internal state for managing queries.
    * @param windowId The windowId of the current window.
    */
   public void beginWindow(long windowId);
+
   /**
    * This should be called in endWindow of an operator so that the {@link QueueManager} can correctly update its
    * internal state for managing queries.
@@ -80,5 +84,6 @@ public interface QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> extends Com
   public int getNumLeft();
 
   public void haltEnqueue();
+
   public void resumeEnqueue();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleDoneQueueManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleDoneQueueManager.java b/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleDoneQueueManager.java
index 2a435b2..88ee363 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleDoneQueueManager.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleDoneQueueManager.java
@@ -31,8 +31,8 @@ import com.datatorrent.lib.appdata.query.QueueList.QueueListNode;
  * @param <META_QUERY> The type of any meta data associated with the queries.
  * @since 3.0.0
  */
-public class SimpleDoneQueueManager<QUERY_TYPE, META_QUERY> extends
-AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, MutableBoolean>
+public class SimpleDoneQueueManager<QUERY_TYPE, META_QUERY>
+    extends AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, MutableBoolean>
 {
   private QueueList<QueryBundle<QUERY_TYPE, META_QUERY, MutableBoolean>> queryQueue;
   private Semaphore semaphore = new Semaphore(1);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleQueueManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleQueueManager.java b/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleQueueManager.java
index fa2b3b7..7c44949 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleQueueManager.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/SimpleQueueManager.java
@@ -21,9 +21,8 @@ package com.datatorrent.lib.appdata.query;
 import java.util.LinkedList;
 import java.util.concurrent.Semaphore;
 
-import com.datatorrent.lib.appdata.QueueUtils.ConditionBarrier;
-
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.lib.appdata.QueueUtils.ConditionBarrier;
 
 /**
  * This {@link QueueManager} functions like a standard {@link QueueManager}. Queries can be enqueued and when they are dequeued they are
@@ -34,10 +33,9 @@ import com.datatorrent.api.Context.OperatorContext;
  * @since 3.0.0
  */
 public class SimpleQueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>
-                      implements QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>
+    implements QueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>
 {
-  private LinkedList<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> queue =
-  new LinkedList<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>>();
+  private LinkedList<QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>> queue = new LinkedList<>();
 
   private final Semaphore semaphore = new Semaphore(0);
   private final ConditionBarrier conditionBarrier = new ConditionBarrier();
@@ -51,10 +49,9 @@ public class SimpleQueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>
   {
     conditionBarrier.gate();
 
-    QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> qq =
-    new QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>(query, metaQuery, queueContext);
+    QueryBundle<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT> qq = new QueryBundle<>(query, metaQuery, queueContext);
 
-    if(queue.offer(qq)) {
+    if (queue.offer(qq)) {
       semaphore.release();
       return true;
     }
@@ -73,8 +70,7 @@ public class SimpleQueueManager<QUERY_TYPE, META_QUERY, QUEUE_CONTEXT>
   {
     try {
       semaphore.acquire();
-    }
-    catch(InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java b/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
index ea9d54b..83e8634 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
@@ -24,13 +24,12 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
 
-import com.google.common.base.Preconditions;
-
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
 import com.datatorrent.api.Component;
 import com.datatorrent.api.Context.OperatorContext;
-
 import com.datatorrent.common.util.NameableThreadFactory;
 import com.datatorrent.netlet.util.DTThrowable;
 
@@ -69,11 +68,9 @@ public class WindowBoundedService implements Component<OperatorContext>
     this.runnable = Preconditions.checkNotNull(runnable);
   }
 
-  public WindowBoundedService(long executeIntervalMillis,
-                              Runnable runnable)
+  public WindowBoundedService(long executeIntervalMillis, Runnable runnable)
   {
-    Preconditions.checkArgument(executeIntervalMillis > 0,
-                                "The executeIntervalMillis must be positive");
+    Preconditions.checkArgument(executeIntervalMillis > 0, "The executeIntervalMillis must be positive");
     this.executeIntervalMillis = executeIntervalMillis;
     this.runnable = Preconditions.checkNotNull(runnable);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/WindowEndQueueManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/WindowEndQueueManager.java b/library/src/main/java/com/datatorrent/lib/appdata/query/WindowEndQueueManager.java
index 0b127b8..48ad333 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/WindowEndQueueManager.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/WindowEndQueueManager.java
@@ -29,7 +29,8 @@ import com.datatorrent.lib.appdata.query.QueueList.QueueListNode;
  * @param <META_QUERY> The type of metadata to be associated with queued queries.
  * @since 3.0.0
  */
-public class WindowEndQueueManager<QUERY_TYPE, META_QUERY> extends AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, MutableLong>
+public class WindowEndQueueManager<QUERY_TYPE, META_QUERY>
+    extends AbstractWindowEndQueueManager<QUERY_TYPE, META_QUERY, MutableLong>
 {
   public WindowEndQueueManager()
   {
@@ -44,10 +45,8 @@ public class WindowEndQueueManager<QUERY_TYPE, META_QUERY> extends AbstractWindo
   @Override
   public void endWindow()
   {
-    for(QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, MutableLong>> tempNode = queryQueue.getHead();
-        tempNode != null;
-        tempNode = tempNode.getNext())
-    {
+    for (QueueListNode<QueryBundle<QUERY_TYPE, META_QUERY, MutableLong>> tempNode = queryQueue.getHead();
+        tempNode != null; tempNode = tempNode.getNext()) {
       MutableLong qc = tempNode.getPayload().getQueueContext();
       qc.decrement();
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/CustomMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/CustomMessageDeserializer.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/CustomMessageDeserializer.java
index 463d095..319dbf3 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/CustomMessageDeserializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/CustomMessageDeserializer.java
@@ -37,7 +37,5 @@ public interface CustomMessageDeserializer
    * @return The deserialized message.
    * @throws IOException
    */
-  public abstract Message deserialize(String json,
-                                      Class<? extends Message> message,
-                                      Object context) throws IOException;
+  Message deserialize(String json, Class<? extends Message> message, Object context) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotDeserializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotDeserializer.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotDeserializer.java
index 714c10b..2e51f9c 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotDeserializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotDeserializer.java
@@ -19,17 +19,16 @@
 package com.datatorrent.lib.appdata.query.serde;
 
 import java.io.IOException;
-
 import java.util.Map;
 import java.util.Set;
 
-import com.google.common.collect.Sets;
-
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Sets;
+
 import com.datatorrent.lib.appdata.schemas.DataQuerySnapshot;
 import com.datatorrent.lib.appdata.schemas.Fields;
 import com.datatorrent.lib.appdata.schemas.Message;
@@ -46,30 +45,27 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
 
   static {
     Set<Fields> firstLevelFieldCombinations = Sets.newHashSet();
+    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID, DataQuerySnapshot.FIELD_TYPE,
+        DataQuerySnapshot.FIELD_COUNTDOWN, DataQuerySnapshot.FIELD_DATA,
+        DataQuerySnapshot.FIELD_INCOMPLETE_RESULTS_OK)));
+
+    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID, DataQuerySnapshot.FIELD_TYPE,
+        DataQuerySnapshot.FIELD_DATA, DataQuerySnapshot.FIELD_INCOMPLETE_RESULTS_OK)));
+
+    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID, DataQuerySnapshot.FIELD_TYPE,
+        DataQuerySnapshot.FIELD_COUNTDOWN, DataQuerySnapshot.FIELD_DATA)));
+
+    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID, DataQuerySnapshot.FIELD_TYPE,
+        DataQuerySnapshot.FIELD_DATA)));
+
     firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID,
-                                                               DataQuerySnapshot.FIELD_TYPE,
-                                                               DataQuerySnapshot.FIELD_COUNTDOWN,
-                                                               DataQuerySnapshot.FIELD_DATA,
-                                                               DataQuerySnapshot.FIELD_INCOMPLETE_RESULTS_OK)));
-    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID,
-                                                               DataQuerySnapshot.FIELD_TYPE,
-                                                               DataQuerySnapshot.FIELD_DATA,
-                                                               DataQuerySnapshot.FIELD_INCOMPLETE_RESULTS_OK)));
-    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID,
-                                                               DataQuerySnapshot.FIELD_TYPE,
-                                                               DataQuerySnapshot.FIELD_COUNTDOWN,
-                                                               DataQuerySnapshot.FIELD_DATA)));
-    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID,
-                                                               DataQuerySnapshot.FIELD_TYPE,
-                                                               DataQuerySnapshot.FIELD_DATA)));
-    firstLevelFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_ID,
-                                                               DataQuerySnapshot.FIELD_TYPE)));
+        DataQuerySnapshot.FIELD_TYPE)));
 
     FIRST_LEVEL_FIELD_COMBINATIONS = firstLevelFieldCombinations;
 
     Set<Fields> dataFieldCombinations = Sets.newHashSet();
     dataFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_SCHEMA_KEYS,
-                                                         DataQuerySnapshot.FIELD_FIELDS)));
+        DataQuerySnapshot.FIELD_FIELDS)));
     dataFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_SCHEMA_KEYS)));
     dataFieldCombinations.add(new Fields(Sets.newHashSet(DataQuerySnapshot.FIELD_FIELDS)));
 
@@ -84,17 +80,13 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
   }
 
   @Override
-  public Message deserialize(String json,
-                             Class<? extends Message> clazz,
-                             Object context) throws IOException
+  public Message deserialize(String json, Class<? extends Message> clazz, Object context) throws IOException
   {
     try {
-      return deserializeHelper(json,
-                               context);
-    }
-    catch(Exception ex) {
+      return deserializeHelper(json, context);
+    } catch (Exception ex) {
       if (ex instanceof IOException) {
-        throw (IOException) ex;
+        throw (IOException)ex;
       } else {
         throw new IOException(ex);
       }
@@ -109,8 +101,7 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
    * method may return null.
    * @throws Exception
    */
-  private Message deserializeHelper(String json,
-                                    Object context) throws Exception
+  private Message deserializeHelper(String json, Object context) throws Exception
   {
     JSONObject jo = new JSONObject(json);
 
@@ -123,7 +114,7 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
     String id = jo.getString(DataQuerySnapshot.FIELD_ID);
     String type = jo.getString(DataQuerySnapshot.FIELD_TYPE);
 
-    if(!type.equals(DataQuerySnapshot.TYPE)) {
+    if (!type.equals(DataQuerySnapshot.TYPE)) {
       LOG.error("Found type {} in the query json, but expected type {}.", type, DataQuerySnapshot.TYPE);
       return null;
     }
@@ -132,7 +123,7 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
     long countdown = -1L;
     boolean hasCountdown = jo.has(DataQuerySnapshot.FIELD_COUNTDOWN);
 
-    if(hasCountdown) {
+    if (hasCountdown) {
       countdown = jo.getLong(DataQuerySnapshot.FIELD_COUNTDOWN);
     }
 
@@ -140,7 +131,7 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
     Map<String, String> schemaKeys = null;
     Set<String> fieldsSet = Sets.newHashSet();
 
-    if(jo.has(DataQuerySnapshot.FIELD_DATA)) {
+    if (jo.has(DataQuerySnapshot.FIELD_DATA)) {
       JSONObject data = jo.getJSONObject(DataQuerySnapshot.FIELD_DATA);
 
       if (!SchemaUtils.checkValidKeys(data, DATA_FIELD_COMBINATIONS)) {
@@ -152,16 +143,14 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
         schemaKeys = SchemaUtils.extractMap(data.getJSONObject(DataQuerySnapshot.FIELD_SCHEMA_KEYS));
       }
 
-      if(data.has(DataQuerySnapshot.FIELD_FIELDS)) {
+      if (data.has(DataQuerySnapshot.FIELD_FIELDS)) {
         //// Fields
         JSONArray jArray = data.getJSONArray(DataQuerySnapshot.FIELD_FIELDS);
 
-        for(int index = 0;
-            index < jArray.length();
-            index++) {
+        for (int index = 0; index < jArray.length(); index++) {
           String field = jArray.getString(index);
 
-          if(!fieldsSet.add(field)) {
+          if (!fieldsSet.add(field)) {
             LOG.error("The field {} was listed more than once, this is an invalid query.", field);
           }
         }
@@ -170,16 +159,10 @@ public class DataQuerySnapshotDeserializer implements CustomMessageDeserializer
 
     Fields fields = new Fields(fieldsSet);
 
-    if(!hasCountdown) {
-      return new DataQuerySnapshot(id,
-                                  fields,
-                                  schemaKeys);
-    }
-    else {
-      return new DataQuerySnapshot(id,
-                                  fields,
-                                  countdown,
-                                  schemaKeys);
+    if (!hasCountdown) {
+      return new DataQuerySnapshot(id, fields, schemaKeys);
+    } else {
+      return new DataQuerySnapshot(id, fields, countdown, schemaKeys);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotValidator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotValidator.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotValidator.java
index f8f3533..5805dae 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotValidator.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataQuerySnapshotValidator.java
@@ -45,15 +45,14 @@ public class DataQuerySnapshotValidator implements CustomMessageValidator
   @Override
   public boolean validate(Message query, Object context)
   {
-    DataQuerySnapshot gdqt = (DataQuerySnapshot) query;
-    SnapshotSchema schema = (SnapshotSchema) ((SchemaRegistry) context).getSchema(gdqt.getSchemaKeys());
+    DataQuerySnapshot gdqt = (DataQuerySnapshot)query;
+    SnapshotSchema schema = (SnapshotSchema)((SchemaRegistry)context).getSchema(gdqt.getSchemaKeys());
 
     Set<String> fields = schema.getValuesDescriptor().getFields().getFields();
 
-    if(!fields.containsAll(gdqt.getFields().getFields())) {
-      LOG.error("Some of the fields in the query {} are not one of the valid fields {}.",
-                fields,
-                gdqt.getFields().getFields());
+    if (!fields.containsAll(gdqt.getFields().getFields())) {
+      LOG.error("Some of the fields in the query {} are not one of the valid fields {}.", fields,
+          gdqt.getFields().getFields());
       return false;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataResultSnapshotSerializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataResultSnapshotSerializer.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataResultSnapshotSerializer.java
index 7a7fdc1..0bfccf8 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataResultSnapshotSerializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/DataResultSnapshotSerializer.java
@@ -49,15 +49,14 @@ public class DataResultSnapshotSerializer implements CustomMessageSerializer
   {
     try {
       return serializeHelper(result, resultFormatter);
-    }
-    catch(Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
   }
 
   private String serializeHelper(Message result, ResultFormatter resultFormatter) throws Exception
   {
-    DataResultSnapshot gResult = (DataResultSnapshot) result;
+    DataResultSnapshot gResult = (DataResultSnapshot)result;
 
     JSONObject jo = new JSONObject();
     jo.put(Result.FIELD_ID, gResult.getId());
@@ -65,16 +64,15 @@ public class DataResultSnapshotSerializer implements CustomMessageSerializer
 
     JSONArray ja = new JSONArray();
 
-    for(GPOMutable value: gResult.getValues()) {
-      JSONObject dataValue = GPOUtils.serializeJSONObject(value,
-                                                          ((DataQuerySnapshot) gResult.getQuery()).getFields(),
-                                                          resultFormatter);
+    for (GPOMutable value : gResult.getValues()) {
+      JSONObject dataValue = GPOUtils.serializeJSONObject(value, ((DataQuerySnapshot)gResult.getQuery()).getFields(),
+          resultFormatter);
       ja.put(dataValue);
     }
 
     jo.put(DataResultSnapshot.FIELD_DATA, ja);
 
-    if(!gResult.isOneTime()) {
+    if (!gResult.isOneTime()) {
       jo.put(Result.FIELD_COUNTDOWN, gResult.getCountdown());
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactory.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactory.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactory.java
index 7649a47..0feeba0 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactory.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageDeserializerFactory.java
@@ -20,19 +20,18 @@ package com.datatorrent.lib.appdata.query.serde;
 
 import java.io.IOException;
 import java.lang.annotation.Annotation;
-
 import java.util.Map;
 import java.util.Set;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
 import com.datatorrent.lib.appdata.schemas.Message;
 
 /**
@@ -88,8 +87,7 @@ public class MessageDeserializerFactory
    * @param clazz The Class of messages that a context a being set for.
    * @param context The context to use when deserializing messages corresponding to the specified class.
    */
-  public void setContext(Class<? extends Message> clazz,
-                         Object context)
+  public void setContext(Class<? extends Message> clazz, Object context)
   {
     deserializationContext.put(clazz, context);
   }
@@ -104,8 +102,7 @@ public class MessageDeserializerFactory
 
     Set<Class<? extends Message>> clazzes = Sets.newHashSet();
 
-    for(Class<? extends Message> schema: schemas)
-    {
+    for (Class<? extends Message> schema : schemas) {
       Preconditions.checkNotNull(schema, "Provided schema cannot be null");
       Preconditions.checkArgument(!clazzes.contains(schema), "Schema %s was passed twice.", schema);
       clazzes.add(schema);
@@ -116,57 +113,53 @@ public class MessageDeserializerFactory
       Class<? extends CustomMessageDeserializer> cqd = null;
       Class<? extends CustomMessageValidator> cqv = null;
 
-      for(Annotation an: ans)
-      {
-        if(an instanceof MessageType) {
-          if(schemaType != null) {
+      for (Annotation an : ans) {
+        if (an instanceof MessageType) {
+          if (schemaType != null) {
             throw new IllegalArgumentException("Cannot specify the " + MessageType.class +
-              " annotation twice on the class: " + schema);
+                " annotation twice on the class: " + schema);
           }
 
-          schemaType = ((MessageType) an).type();
+          schemaType = ((MessageType)an).type();
 
-          LOG.debug("Detected schemaType for {} is {}",
-                       schema,
-                       schemaType);
-        }
-        else if(an instanceof MessageDeserializerInfo) {
-          if(cqd != null) {
+          LOG.debug("Detected schemaType for {} is {}", schema, schemaType);
+        } else if (an instanceof MessageDeserializerInfo) {
+          if (cqd != null) {
             throw new IllegalArgumentException("Cannot specify the " + MessageDeserializerInfo.class +
-              " annotation twice on the class: " + schema);
+                " annotation twice on the class: " + schema);
           }
 
-          cqd = ((MessageDeserializerInfo) an).clazz();
-        }
-        else if(an instanceof MessageValidatorInfo) {
-          if(cqv != null) {
+          cqd = ((MessageDeserializerInfo)an).clazz();
+        } else if (an instanceof MessageValidatorInfo) {
+          if (cqv != null) {
             throw new IllegalArgumentException("Cannot specify the " + MessageValidatorInfo.class +
-              " annotation twice on the class: ");
+                " annotation twice on the class: ");
           }
 
-          cqv = ((MessageValidatorInfo) an).clazz();
+          cqv = ((MessageValidatorInfo)an).clazz();
         }
       }
 
-      if(schemaType == null) {
+      if (schemaType == null) {
         throw new IllegalArgumentException("No " + MessageType.class + " annotation found on class: " + schema);
       }
 
-      if(cqd == null) {
+      if (cqd == null) {
         throw new IllegalArgumentException("No " + MessageDeserializerInfo.class + " annotation found on class: " +
-          schema);
+            schema);
       }
 
-      if(cqv == null) {
-        throw new IllegalArgumentException("No " + MessageValidatorInfo.class + " annotation found on class: " + schema);
+      if (cqv == null) {
+        throw new IllegalArgumentException(
+            "No " + MessageValidatorInfo.class + " annotation found on class: " + schema);
       }
 
       Class<? extends Message> prevSchema = typeToClass.put(schemaType, schema);
       LOG.debug("prevSchema {}:", prevSchema);
 
-      if(prevSchema != null) {
+      if (prevSchema != null) {
         throw new IllegalArgumentException("Cannot have the " +
-          schemaType + " schemaType defined on multiple classes: " + schema + ", " + prevSchema);
+            schemaType + " schemaType defined on multiple classes: " + schema + ", " + prevSchema);
       }
 
       try {
@@ -174,11 +167,7 @@ public class MessageDeserializerFactory
         CustomMessageValidator cqvI = cqv.newInstance();
         typeToCustomQueryBuilder.put(schemaType, cqdI);
         typeToCustomQueryValidator.put(schemaType, cqvI);
-      }
-      catch(InstantiationException ex) {
-        throw new RuntimeException(ex);
-      }
-      catch(IllegalAccessException ex) {
+      } catch (InstantiationException | IllegalAccessException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -195,22 +184,19 @@ public class MessageDeserializerFactory
   {
     String type;
 
-    try
-    {
+    try {
       JSONObject jsonObject = new JSONObject(json);
       type = jsonObject.getString(Message.FIELD_TYPE);
-    }
-    catch(JSONException e)
-    {
+    } catch (JSONException e) {
       throw new IOException(e);
     }
 
     CustomMessageDeserializer cqb = typeToCustomQueryBuilder.get(type);
 
-    if(cqb == null) {
+    if (cqb == null) {
       throw new IOException("The query type " +
-                            type +
-                            " does not have a corresponding deserializer.");
+          type +
+          " does not have a corresponding deserializer.");
     }
 
     CustomMessageValidator cqv = typeToCustomQueryValidator.get(type);
@@ -219,7 +205,7 @@ public class MessageDeserializerFactory
 
     LOG.debug("{}", data);
 
-    if(data == null || !(cqv != null && cqv.validate(data, context))) {
+    if (data == null || !(cqv != null && cqv.validate(data, context))) {
       return null;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageSerializerFactory.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageSerializerFactory.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageSerializerFactory.java
index 05c4656..014824a 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageSerializerFactory.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/MessageSerializerFactory.java
@@ -19,15 +19,14 @@
 package com.datatorrent.lib.appdata.query.serde;
 
 import java.lang.annotation.Annotation;
-
 import java.util.Map;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.schemas.Result;
 import com.datatorrent.lib.appdata.schemas.ResultFormatter;
 
@@ -88,53 +87,42 @@ public class MessageSerializerFactory
     CustomMessageSerializer mcrs = clazzToCustomResultBuilder.get(result.getClass());
     Class<? extends Result> schema = result.getClass();
 
-    if(mcrs == null) {
+    if (mcrs == null) {
       Annotation[] ans = schema.getAnnotations();
 
       Class<? extends CustomMessageSerializer> crs = null;
       String type = null;
 
-      for(Annotation an: ans) {
-        if(an instanceof MessageSerializerInfo) {
-          if(crs != null) {
-            throw new UnsupportedOperationException("Cannot specify the "
-                    + MessageSerializerInfo.class
-                    + " annotation twice on the class: "
-                    + schema);
+      for (Annotation an : ans) {
+        if (an instanceof MessageSerializerInfo) {
+          if (crs != null) {
+            throw new UnsupportedOperationException("Cannot specify the " + MessageSerializerInfo.class
+                + " annotation twice on the class: " + schema);
           }
 
           crs = ((MessageSerializerInfo)an).clazz();
-        }
-        else if(an instanceof MessageType) {
-          if(type != null) {
-            throw new UnsupportedOperationException("Cannot specify the " +
-                                                    MessageType.class +
-                                                    " annotation twice on the class: " +
-                                                    schema);
+        } else if (an instanceof MessageType) {
+          if (type != null) {
+            throw new UnsupportedOperationException("Cannot specify the " + MessageType.class +
+                " annotation twice on the class: " + schema);
           }
 
-          type = ((MessageType) an).type();
+          type = ((MessageType)an).type();
         }
       }
 
-      if(crs == null) {
-        throw new UnsupportedOperationException("No " + MessageSerializerInfo.class
-                + " annotation found on class: "
-                + schema);
+      if (crs == null) {
+        throw new UnsupportedOperationException("No " + MessageSerializerInfo.class + " annotation found on class: "
+            + schema);
       }
 
-      if(type == null) {
-        throw new UnsupportedOperationException("No " + MessageType.class +
-                                                " annotation found on class " + schema);
+      if (type == null) {
+        throw new UnsupportedOperationException("No " + MessageType.class + " annotation found on class " + schema);
       }
 
       try {
         mcrs = crs.newInstance();
-      }
-      catch(InstantiationException ex) {
-        throw new RuntimeException(ex);
-      }
-      catch(IllegalAccessException ex) {
+      } catch (InstantiationException | IllegalAccessException ex) {
         throw new RuntimeException(ex);
       }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SchemaQueryDeserializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SchemaQueryDeserializer.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SchemaQueryDeserializer.java
index c17ac5c..970c2ee 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SchemaQueryDeserializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SchemaQueryDeserializer.java
@@ -43,8 +43,7 @@ public class SchemaQueryDeserializer implements CustomMessageDeserializer
   {
     try {
       return deserializeHelper(json, message, context);
-    }
-    catch(Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -55,7 +54,7 @@ public class SchemaQueryDeserializer implements CustomMessageDeserializer
 
     String type = schemaJO.getString(Query.FIELD_TYPE);
 
-    if(!type.equals(SchemaQuery.TYPE)) {
+    if (!type.equals(SchemaQuery.TYPE)) {
       LOG.error("The given type {} is invalid.", type);
       return null;
     }
@@ -64,36 +63,34 @@ public class SchemaQueryDeserializer implements CustomMessageDeserializer
     Map<String, String> contextKeysMap = null;
     Map<String, String> schemaKeysMap = null;
 
-    if(schemaJO.has(SchemaQuery.FIELD_CONTEXT)) {
+    if (schemaJO.has(SchemaQuery.FIELD_CONTEXT)) {
       JSONObject contextJO = schemaJO.getJSONObject(SchemaQuery.FIELD_CONTEXT);
 
-      if(contextJO.length() == 0) {
+      if (contextJO.length() == 0) {
         LOG.error("The context cannot be empty");
         return null;
       }
 
-      if(contextJO.has(SchemaQuery.FIELD_CONTEXT_KEYS)) {
+      if (contextJO.has(SchemaQuery.FIELD_CONTEXT_KEYS)) {
         JSONObject keys = contextJO.getJSONObject(SchemaQuery.FIELD_CONTEXT_KEYS);
         contextKeysMap = SchemaUtils.extractMap(keys);
 
-        if(contextKeysMap.isEmpty()) {
+        if (contextKeysMap.isEmpty()) {
           contextKeysMap = null;
         }
       }
 
-      if(contextJO.has(SchemaQuery.FIELD_SCHEMA_KEYS)) {
+      if (contextJO.has(SchemaQuery.FIELD_SCHEMA_KEYS)) {
         JSONObject schemaKeys = contextJO.getJSONObject(SchemaQuery.FIELD_SCHEMA_KEYS);
         schemaKeysMap = SchemaUtils.extractMap(schemaKeys);
 
-        if(schemaKeysMap.isEmpty()) {
+        if (schemaKeysMap.isEmpty()) {
           schemaKeysMap = null;
         }
       }
     }
 
-    SchemaQuery sq = new SchemaQuery(id,
-                           schemaKeysMap,
-                           contextKeysMap);
+    SchemaQuery sq = new SchemaQuery(id, schemaKeysMap, contextKeysMap);
 
     return sq;
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataDeserializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataDeserializer.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataDeserializer.java
index 4218c69..348993f 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataDeserializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataDeserializer.java
@@ -48,8 +48,7 @@ public class SimpleDataDeserializer implements CustomMessageDeserializer
 
     try {
       data = om.readValue(json, clazz);
-    }
-    catch(IOException ex) {
+    } catch (IOException ex) {
       throw ex;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataSerializer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataSerializer.java b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataSerializer.java
index 53c26ec..0499486 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataSerializer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/serde/SimpleDataSerializer.java
@@ -45,8 +45,7 @@ public class SimpleDataSerializer implements CustomMessageSerializer
   {
     try {
       return om.writeValueAsString(result);
-    }
-    catch(IOException ex) {
+    } catch (IOException ex) {
       ex.printStackTrace();
       return null;
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/CustomTimeBucket.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/CustomTimeBucket.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/CustomTimeBucket.java
index 3075da5..8f45862 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/CustomTimeBucket.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/CustomTimeBucket.java
@@ -67,31 +67,25 @@ public class CustomTimeBucket implements Serializable, Comparable<CustomTimeBuck
       @SuppressWarnings("LocalVariableHidesMemberVariable")
       TimeBucket timeBucket = TimeBucket.getTimeBucketForSuffixEx(suffix);
 
-      initialize(timeBucket,
-                 amount);
+      initialize(timeBucket, amount);
     }
   }
 
-  public CustomTimeBucket(TimeBucket timeBucket,
-                          long count)
+  public CustomTimeBucket(TimeBucket timeBucket, long count)
   {
-    initialize(timeBucket,
-               count);
+    initialize(timeBucket, count);
   }
 
   public CustomTimeBucket(TimeBucket timeBucket)
   {
     if (timeBucket == TimeBucket.ALL) {
-      initialize(timeBucket,
-                 0L);
+      initialize(timeBucket, 0L);
     } else {
-      initialize(timeBucket,
-                 1L);
+      initialize(timeBucket, 1L);
     }
   }
 
-  private void initialize(TimeBucket timeBucket,
-                          long count)
+  private void initialize(TimeBucket timeBucket, long count)
   {
     this.timeBucket = Preconditions.checkNotNull(timeBucket);
     this.count = count;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshot.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshot.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshot.java
index b294705..b5de6f3 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshot.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshot.java
@@ -32,9 +32,9 @@ import com.datatorrent.lib.appdata.query.serde.MessageValidatorInfo;
  * This object represents queries issued against the {@link SnapshotSchema}.
  * @since 3.0.0
  */
-@MessageType(type=DataQuerySnapshot.TYPE)
-@MessageDeserializerInfo(clazz=DataQuerySnapshotDeserializer.class)
-@MessageValidatorInfo(clazz=DataQuerySnapshotValidator.class)
+@MessageType(type = DataQuerySnapshot.TYPE)
+@MessageDeserializerInfo(clazz = DataQuerySnapshotDeserializer.class)
+@MessageValidatorInfo(clazz = DataQuerySnapshotValidator.class)
 public class DataQuerySnapshot extends Query
 {
   /**
@@ -70,12 +70,9 @@ public class DataQuerySnapshot extends Query
    * @param id The id of the query.
    * @param fields The fields requested by the query.
    */
-  public DataQuerySnapshot(String id,
-                          Fields fields)
+  public DataQuerySnapshot(String id, Fields fields)
   {
-    this(id,
-         fields,
-         null);
+    this(id, fields, null);
   }
 
   /**
@@ -86,13 +83,9 @@ public class DataQuerySnapshot extends Query
    * @param fields The requested fields in the query.
    * @param schemaKeys The schema keys corresponding to the schema this query will be directed against.
    */
-  public DataQuerySnapshot(String id,
-                          Fields fields,
-                          Map<String, String> schemaKeys)
+  public DataQuerySnapshot(String id, Fields fields, Map<String, String> schemaKeys)
   {
-    super(id,
-          TYPE,
-          schemaKeys);
+    super(id, TYPE, schemaKeys);
 
     setFields(fields);
   }
@@ -104,14 +97,9 @@ public class DataQuerySnapshot extends Query
    * @param fields The requested fields in the query.
    * @param countdown The countdown for the query.
    */
-  public DataQuerySnapshot(String id,
-                          Fields fields,
-                          long countdown)
+  public DataQuerySnapshot(String id, Fields fields, long countdown)
   {
-    this(id,
-         fields,
-         countdown,
-         null);
+    this(id, fields, countdown, null);
   }
 
   /**
@@ -122,15 +110,9 @@ public class DataQuerySnapshot extends Query
    * @param schemaKeys The schemaKeys which identify the schema which the query is
    * issued against.
    */
-  public DataQuerySnapshot(String id,
-                          Fields fields,
-                          long countdown,
-                          Map<String, String> schemaKeys)
+  public DataQuerySnapshot(String id, Fields fields, long countdown, Map<String, String> schemaKeys)
   {
-    super(id,
-          TYPE,
-          countdown,
-          schemaKeys);
+    super(id, TYPE, countdown, schemaKeys);
 
     setFields(fields);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshot.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshot.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshot.java
index 6d4eb91..47d068c 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshot.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshot.java
@@ -31,8 +31,8 @@ import com.datatorrent.lib.appdata.query.serde.MessageType;
  * This class represents the result sent in response to a {@link DataQuerySnapshot}.
  * @since 3.0.0
  */
-@MessageType(type=DataResultSnapshot.TYPE)
-@MessageSerializerInfo(clazz=DataResultSnapshotSerializer.class)
+@MessageType(type = DataResultSnapshot.TYPE)
+@MessageSerializerInfo(clazz = DataResultSnapshotSerializer.class)
 public class DataResultSnapshot extends Result
 {
   /**
@@ -48,8 +48,7 @@ public class DataResultSnapshot extends Result
    * @param query The query that this result is a response to.
    * @param values The result values for the query.
    */
-  public DataResultSnapshot(Query query,
-                           List<GPOMutable> values)
+  public DataResultSnapshot(Query query, List<GPOMutable> values)
   {
     super(query);
 
@@ -63,12 +62,9 @@ public class DataResultSnapshot extends Result
    * @param values The result values for the query.
    * @param countdown The countdown value for the result.
    */
-  public DataResultSnapshot(Query query,
-                           List<GPOMutable> values,
-                           long countdown)
+  public DataResultSnapshot(Query query, List<GPOMutable> values, long countdown)
   {
-    super(query,
-          countdown);
+    super(query, countdown);
 
     setValues(values);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
index 5e82256..2333dbb 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
@@ -27,6 +27,12 @@ import java.util.Set;
 
 import javax.validation.constraints.NotNull;
 
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.apex.malhar.lib.dimensions.CustomTimeBucketRegistry;
 import org.apache.apex.malhar.lib.dimensions.DimensionsDescriptor;
 import org.apache.apex.malhar.lib.dimensions.aggregator.AbstractCompositeAggregator;
@@ -38,11 +44,6 @@ import org.apache.apex.malhar.lib.dimensions.aggregator.CompositeAggregatorFacto
 import org.apache.apex.malhar.lib.dimensions.aggregator.DefaultCompositeAggregatorFactory;
 import org.apache.apex.malhar.lib.dimensions.aggregator.IncrementalAggregator;
 import org.apache.apex.malhar.lib.dimensions.aggregator.OTFAggregator;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
index 9ac76c8..6138971 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
@@ -23,14 +23,15 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.apex.malhar.lib.dimensions.aggregator.AggregatorRegistry;
-import org.apache.apex.malhar.lib.dimensions.aggregator.IncrementalAggregator;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.apex.malhar.lib.dimensions.aggregator.AggregatorRegistry;
+import org.apache.apex.malhar.lib.dimensions.aggregator.IncrementalAggregator;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/schemas/Fields.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Fields.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Fields.java
index 8dc76a4..89cddcf 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Fields.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Fields.java
@@ -19,18 +19,17 @@
 package com.datatorrent.lib.appdata.schemas;
 
 import java.io.Serializable;
-
 import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * This is a somewhat vacuous class for managing field names for AppData schemas and queries.
  * Its function is to maintain a set of field names with no null values. It also maintains a list
@@ -65,9 +64,9 @@ public class Fields implements Serializable
   {
     this.fields = Sets.newHashSet();
 
-    for(String field: fields) {
+    for (String field : fields) {
       Preconditions.checkNotNull(field);
-      if(!this.fields.add(field)) {
+      if (!this.fields.add(field)) {
         throw new IllegalArgumentException("Duplicate field: " + field);
       }
     }
@@ -111,14 +110,14 @@ public class Fields implements Serializable
   @Override
   public boolean equals(Object obj)
   {
-    if(obj == null) {
+    if (obj == null) {
       return false;
     }
-    if(getClass() != obj.getClass()) {
+    if (getClass() != obj.getClass()) {
       return false;
     }
     final Fields other = (Fields)obj;
-    if(this.fields != other.fields && (this.fields == null || !this.fields.equals(other.fields))) {
+    if (this.fields != other.fields && (this.fields == null || !this.fields.equals(other.fields))) {
       return false;
     }
     return true;


[08/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotTest.java
index 0cf03bc..8e827e0 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataQuerySnapshotTest.java
@@ -18,10 +18,11 @@
  */
 package com.datatorrent.lib.appdata.schemas;
 
-import com.google.common.collect.Sets;
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Sets;
+
 public class DataQuerySnapshotTest
 {
   @Test
@@ -29,8 +30,7 @@ public class DataQuerySnapshotTest
   {
     Fields fields = new Fields(Sets.newHashSet("a", "b"));
 
-    DataQuerySnapshot query = new DataQuerySnapshot("1",
-                                                  fields);
+    DataQuerySnapshot query = new DataQuerySnapshot("1", fields);
 
     Assert.assertEquals("This query should be oneTime.", true, query.isOneTime());
   }
@@ -40,9 +40,7 @@ public class DataQuerySnapshotTest
   {
     Fields fields = new Fields(Sets.newHashSet("a", "b"));
 
-    DataQuerySnapshot query = new DataQuerySnapshot("1",
-                                                  fields,
-                                                  1L);
+    DataQuerySnapshot query = new DataQuerySnapshot("1", fields, 1L);
 
     Assert.assertEquals("This query should be oneTime.", false, query.isOneTime());
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshotSerializerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshotSerializerTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshotSerializerTest.java
index fd99013..488f2bf 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshotSerializerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DataResultSnapshotSerializerTest.java
@@ -21,10 +21,6 @@ package com.datatorrent.lib.appdata.schemas;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.junit.Assert;
@@ -32,6 +28,10 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
 import com.datatorrent.lib.appdata.gpo.GPOMutable;
 import com.datatorrent.lib.appdata.query.serde.DataResultSnapshotSerializer;
 
@@ -43,16 +43,14 @@ public class DataResultSnapshotSerializerTest
   public void simpleResultSerializerTest() throws Exception
   {
     List<GPOMutable> dataValues = createValues();
-    DataQuerySnapshot gQuery = new DataQuerySnapshot("1",
-                                                   new Fields(Sets.newHashSet("a", "b")));
+    DataQuerySnapshot gQuery = new DataQuerySnapshot("1", new Fields(Sets.newHashSet("a", "b")));
 
-    DataResultSnapshot result = new DataResultSnapshot(gQuery,
-                                                     dataValues);
+    DataResultSnapshot result = new DataResultSnapshot(gQuery, dataValues);
 
     DataResultSnapshotSerializer serializer = new DataResultSnapshotSerializer();
 
-    final String expectedJSONResult =
-    "{\"id\":\"1\",\"type\":\"dataResult\",\"data\":[{\"b\":\"hello\",\"a\":\"1\"},{\"b\":\"world\",\"a\":\"2\"}]}";
+    final String expectedJSONResult = "{\"id\":\"1\",\"type\":\"dataResult\",\"data\":[{\"b\":\"hello\",\"a\":\"1\"}," +
+        "{\"b\":\"world\",\"a\":\"2\"}]}";
     String resultJSON = serializer.serialize(result, new ResultFormatter());
 
     JSONObject jo = new JSONObject(expectedJSONResult);
@@ -78,12 +76,9 @@ public class DataResultSnapshotSerializerTest
   public void simpleCountdownTest() throws Exception
   {
     List<GPOMutable> dataValues = createValues();
-    DataQuerySnapshot gQuery = new DataQuerySnapshot("1",
-                                                   new Fields(Sets.newHashSet("a", "b")));
+    DataQuerySnapshot gQuery = new DataQuerySnapshot("1", new Fields(Sets.newHashSet("a", "b")));
 
-    DataResultSnapshot result = new DataResultSnapshot(gQuery,
-                                                     dataValues,
-                                                     2);
+    DataResultSnapshot result = new DataResultSnapshot(gQuery, dataValues, 2);
 
     DataResultSnapshotSerializer serializer = new DataResultSnapshotSerializer();
     String resultJSON = serializer.serialize(result, new ResultFormatter());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchemaTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchemaTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchemaTest.java
index b3669bc..2306ae0 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchemaTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchemaTest.java
@@ -24,12 +24,12 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.codehaus.jettison.json.JSONArray;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.codehaus.jettison.json.JSONArray;
 
 import org.apache.apex.malhar.lib.dimensions.DimensionsDescriptor;
 import org.apache.apex.malhar.lib.dimensions.aggregator.AggregatorIncrementalType;
@@ -80,21 +80,22 @@ public class DimensionalConfigurationSchemaTest
     final String valueName1Type = "double";
     final String valueName2Type = "integer";
 
-    final String jsonSchema =
-    "{\"keys\":\n" +
-      "[{\"name\":\"" + keyName1 + "\",\"type\":\"" + keyName1Type + "\"},\n" +
-       "{\"name\":\"" + keyName2 + "\",\"type\":\"" + keyName2Type + "\"}],\n" +
-      "\"values\":\n" +
-      "[{\"name\":\"" + valueName1 + "\",\"type\":\"" + valueName1Type + "\"},\n" +
-       "{\"name\":\"" + valueName2 + "\",\"type\":\"" + valueName2Type + "\"}],\n" +
-      "\"timeBuckets\":[all]," +
-      "\"dimensions\":\n" +
-      "[{\"combination\":[\"" + keyName1 + "\",\"" + keyName2 + "\"],\"additionalValues\":[\"" + valueName1 + ":MIN\"," + "\"" + valueName1 + ":MAX\"]},\n" +
-       "{\"combination\":[\"" + keyName1 + "\"],\"additionalValues\":[\"" + valueName2 + ":SUM\"," + "\"" + valueName2 + ":COUNT\"]}]\n" +
-    "}";
+    final String jsonSchema = "{\"keys\":\n" +
+        "[{\"name\":\"" + keyName1 + "\",\"type\":\"" + keyName1Type + "\"},\n" +
+        "{\"name\":\"" + keyName2 + "\",\"type\":\"" + keyName2Type + "\"}],\n" +
+        "\"values\":\n" +
+        "[{\"name\":\"" + valueName1 + "\",\"type\":\"" + valueName1Type + "\"},\n" +
+        "{\"name\":\"" + valueName2 + "\",\"type\":\"" + valueName2Type + "\"}],\n" +
+        "\"timeBuckets\":[all]," +
+        "\"dimensions\":\n" +
+        "[{\"combination\":[\"" + keyName1 + "\",\"" + keyName2 + "\"],\"additionalValues\":[\"" + valueName1 +
+        ":MIN\"," + "\"" + valueName1 + ":MAX\"]},\n" +
+        "{\"combination\":[\"" + keyName1 + "\"],\"additionalValues\":[\"" + valueName2 + ":SUM\"," + "\"" +
+        valueName2 + ":COUNT\"]}]\n" +
+        "}";
 
     DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(jsonSchema,
-                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     FieldsDescriptor allKeysDescriptor = des.getKeyDescriptor();
 
@@ -128,13 +129,13 @@ public class DimensionalConfigurationSchemaTest
     logger.debug("map: {}", des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(0));
 
     Assert.assertTrue("Incorrect aggregate fields.",
-                        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(0).get("MIN").getFields().getFields().equals(minAggFields));
+        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(0).get("MIN").getFields().getFields().equals(minAggFields));
     Assert.assertTrue("Incorrect aggregate fields.",
-                        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(0).get("MAX").getFields().getFields().equals(maxAggFields));
+        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(0).get("MAX").getFields().getFields().equals(maxAggFields));
     Assert.assertTrue("Incorrect aggregate fields.",
-                        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(1).get("SUM").getFields().getFields().equals(sumAggFields));
+        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(1).get("SUM").getFields().getFields().equals(sumAggFields));
     Assert.assertTrue("Incorrect aggregate fields.",
-                        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(1).get("COUNT").getFields().getFields().equals(countAggFields));
+        des.getDimensionsDescriptorIDToAggregatorToAggregateDescriptor().get(1).get("COUNT").getFields().getFields().equals(countAggFields));
 
     final Map<String, Integer> aggToId = Maps.newHashMap();
     aggToId.put("min", 0);
@@ -158,24 +159,26 @@ public class DimensionalConfigurationSchemaTest
     final String valueName1Type = "double";
     final String valueName2Type = "integer";
 
-    final String jsonSchema =
-    "{\"keys\":\n" +
-      "[{\"name\":\"" + keyName1 + "\",\"type\":\"" + keyName1Type + "\"},\n" +
-       "{\"name\":\"" + keyName2 + "\",\"type\":\"" + keyName2Type + "\"}],\n" +
-      "\"values\":\n" +
-      "[{\"name\":\"" + valueName1 + "\",\"type\":\"" + valueName1Type + "\"},\n" +
-       "{\"name\":\"" + valueName2 + "\",\"type\":\"" + valueName2Type + "\"}],\n" +
-      "\"timeBuckets\":[\"1m\"]," +
-      "\"dimensions\":\n" +
-      "[{\"combination\":[\"" + keyName1 + "\",\"" + keyName2 + "\"],\"additionalValues\":[\"" + valueName1 + ":COUNT\"" + "]},\n" + "]\n" +
-    "}";
+    final String jsonSchema = "{\"keys\":\n" +
+        "[{\"name\":\"" + keyName1 + "\",\"type\":\"" + keyName1Type + "\"},\n" +
+        "{\"name\":\"" + keyName2 + "\",\"type\":\"" + keyName2Type + "\"}],\n" +
+        "\"values\":\n" +
+        "[{\"name\":\"" + valueName1 + "\",\"type\":\"" + valueName1Type + "\"},\n" +
+        "{\"name\":\"" + valueName2 + "\",\"type\":\"" + valueName2Type + "\"}],\n" +
+        "\"timeBuckets\":[\"1m\"]," +
+        "\"dimensions\":\n" +
+        "[{\"combination\":[\"" + keyName1 + "\",\"" + keyName2 + "\"],\"additionalValues\":[\"" + valueName1 +
+        ":COUNT\"" + "]},\n" + "]\n" +
+        "}";
 
     DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(jsonSchema,
-                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
-    FieldsDescriptor fd = des.getDimensionsDescriptorIDToAggregatorIDToOutputAggregatorDescriptor().get(0).get(AggregatorIncrementalType.NAME_TO_ORDINAL.get("COUNT"));
+    FieldsDescriptor fd = des.getDimensionsDescriptorIDToAggregatorIDToOutputAggregatorDescriptor().get(0).get(
+        AggregatorIncrementalType.NAME_TO_ORDINAL.get("COUNT"));
 
-    Assert.assertEquals("Indexes for type compress fields should be 0", 0, (int) fd.getTypeToFieldToIndex().get(Type.LONG).get("valueName1"));
+    Assert.assertEquals("Indexes for type compress fields should be 0", 0,
+        (int)fd.getTypeToFieldToIndex().get(Type.LONG).get("valueName1"));
   }
 
   @Test
@@ -188,17 +191,17 @@ public class DimensionalConfigurationSchemaTest
     final String valueName1Type = "double";
 
     final String jsonSchema = "{\"keys\":\n" +
-                                "[{\"name\":\"" + keyName1 + "\",\"type\":\"" + keyName1Type + "\"}],\n" +
-                                "\"values\":\n" +
-                                "[{\"name\":\"" + valueName1 + "\",\"type\":\"" + valueName1Type + "\",\"aggregators\":[\"AVG\"]}],\n" +
-                                "\"timeBuckets\":[\"1m\"]," +
-                                "\"dimensions\":\n" +
-                                "[{\"combination\":[\"" + keyName1 + "\"]}]}";
+        "[{\"name\":\"" + keyName1 + "\",\"type\":\"" + keyName1Type + "\"}],\n" +
+        "\"values\":\n" +
+        "[{\"name\":\"" + valueName1 + "\",\"type\":\"" + valueName1Type + "\",\"aggregators\":[\"AVG\"]}],\n" +
+        "\"timeBuckets\":[\"1m\"]," +
+        "\"dimensions\":\n" +
+        "[{\"combination\":[\"" + keyName1 + "\"]}]}";
 
     logger.debug("test schema:\n{}", jsonSchema);
 
     DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(jsonSchema,
-                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     Assert.assertEquals(1, des.getDimensionsDescriptorIDToDimensionsDescriptor().size());
 
@@ -222,8 +225,9 @@ public class DimensionalConfigurationSchemaTest
   @Test
   public void getAllKeysDescriptorTest()
   {
-    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchema.json"),
-                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchema.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     Set<String> keys = Sets.newHashSet("publisher", "advertiser", "location");
 
@@ -233,8 +237,9 @@ public class DimensionalConfigurationSchemaTest
   @Test
   public void aggregationSchemaTest()
   {
-    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchemaAggregations.json"),
-                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchemaAggregations.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     Set<String> keys = Sets.newHashSet();
 
@@ -256,8 +261,9 @@ public class DimensionalConfigurationSchemaTest
   @Test
   public void simpleOTFTest()
   {
-    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchemaOTF.json"),
-                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchemaOTF.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     Assert.assertEquals(4, des.getDimensionsDescriptorIDToAggregatorIDs().get(0).size());
   }
@@ -265,31 +271,29 @@ public class DimensionalConfigurationSchemaTest
   @Test
   public void testConstructorAgreement()
   {
-    DimensionalConfigurationSchema expectedEventSchema = new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchemaAdditional.json"),
-                                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema expectedEventSchema = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchemaAdditional.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
     @SuppressWarnings("unchecked")
-    List<Object> publisherEnumVals = (List<Object>) ((List) Lists.newArrayList("twitter","facebook","yahoo","google","bing","amazon"));
+    List<Object> publisherEnumVals = (List<Object>)((List)Lists.newArrayList("twitter", "facebook", "yahoo", "google",
+        "bing", "amazon"));
     @SuppressWarnings("unchecked")
-    List<Object> advertiserEnumVals = (List<Object>) ((List) Lists.newArrayList("starbucks","safeway","mcdonalds","macys","taco bell","walmart","khol's","san diego zoo","pandas","jack in the box","tomatina","ron swanson"));
+    List<Object> advertiserEnumVals = (List<Object>)((List)Lists.newArrayList("starbucks", "safeway", "mcdonalds",
+        "macys", "taco bell", "walmart", "khol's", "san diego zoo", "pandas", "jack in the box", "tomatina",
+        "ron swanson"));
     @SuppressWarnings("unchecked")
-    List<Object> locationEnumVals = (List<Object>) ((List) Lists.newArrayList("N","LREC","SKY","AL","AK","AZ","AR","CA","CO","CT","DE","FL","GA","HI","ID"));
+    List<Object> locationEnumVals = (List<Object>)((List)Lists.newArrayList("N", "LREC", "SKY", "AL", "AK", "AZ", "AR",
+        "CA", "CO", "CT", "DE", "FL", "GA", "HI", "ID"));
 
     List<Key> keys = Lists.newArrayList(new Key("publisher", Type.STRING, publisherEnumVals),
-                                        new Key("advertiser", Type.STRING, advertiserEnumVals),
-                                        new Key("location", Type.STRING, locationEnumVals));
+        new Key("advertiser", Type.STRING, advertiserEnumVals), new Key("location", Type.STRING, locationEnumVals));
+
     List<TimeBucket> timeBuckets = Lists.newArrayList(TimeBucket.MINUTE, TimeBucket.HOUR, TimeBucket.DAY);
-    List<Value> values = Lists.newArrayList(new Value("impressions",
-                                                      Type.LONG,
-                                                      Sets.newHashSet("SUM", "COUNT")),
-                                            new Value("clicks",
-                                                      Type.LONG,
-                                                      Sets.newHashSet("SUM", "COUNT")),
-                                            new Value("cost",
-                                                      Type.DOUBLE,
-                                                      Sets.newHashSet("SUM", "COUNT")),
-                                            new Value("revenue",
-                                                      Type.DOUBLE,
-                                                      Sets.newHashSet("SUM", "COUNT")));
+
+    List<Value> values = Lists.newArrayList(new Value("impressions", Type.LONG, Sets.newHashSet("SUM", "COUNT")),
+        new Value("clicks", Type.LONG, Sets.newHashSet("SUM", "COUNT")),
+        new Value("cost", Type.DOUBLE, Sets.newHashSet("SUM", "COUNT")),
+        new Value("revenue", Type.DOUBLE, Sets.newHashSet("SUM", "COUNT")));
 
     Map<String, Set<String>> valueToAggregators = Maps.newHashMap();
     valueToAggregators.put("impressions", Sets.newHashSet("MIN", "MAX"));
@@ -300,29 +304,18 @@ public class DimensionalConfigurationSchemaTest
     Set<String> emptySet = Sets.newHashSet();
     Map<String, Set<String>> emptyMap = Maps.newHashMap();
 
-    List<DimensionsCombination> dimensionsCombinations =
-    Lists.newArrayList(new DimensionsCombination(new Fields(emptySet),
-                                                 emptyMap),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("location")),
-                                                 emptyMap),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("advertiser")),
-                                                 valueToAggregators),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("publisher")),
-                                                 valueToAggregators),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("advertiser", "location")),
-                                                 emptyMap),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("publisher", "location")),
-                                                 emptyMap),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("publisher", "advertiser")),
-                                                 emptyMap),
-                       new DimensionsCombination(new Fields(Sets.newHashSet("publisher", "advertiser", "location")),
-                                                 emptyMap));
+    List<DimensionsCombination> dimensionsCombinations = Lists.newArrayList(
+        new DimensionsCombination(new Fields(emptySet), emptyMap),
+        new DimensionsCombination(new Fields(Sets.newHashSet("location")), emptyMap),
+        new DimensionsCombination(new Fields(Sets.newHashSet("advertiser")), valueToAggregators),
+        new DimensionsCombination(new Fields(Sets.newHashSet("publisher")), valueToAggregators),
+        new DimensionsCombination(new Fields(Sets.newHashSet("advertiser", "location")), emptyMap),
+        new DimensionsCombination(new Fields(Sets.newHashSet("publisher", "location")), emptyMap),
+        new DimensionsCombination(new Fields(Sets.newHashSet("publisher", "advertiser")), emptyMap),
+        new DimensionsCombination(new Fields(Sets.newHashSet("publisher", "advertiser", "location")), emptyMap));
 
     DimensionalConfigurationSchema eventSchema = new DimensionalConfigurationSchema(keys,
-                                                                    values,
-                                                                    timeBuckets,
-                                                                    dimensionsCombinations,
-                                                                    AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+        values, timeBuckets, dimensionsCombinations, AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     logger.debug("expected {}", expectedEventSchema.getDimensionsDescriptorIDToValueToOTFAggregator());
     logger.debug("actual   {}", eventSchema.getDimensionsDescriptorIDToValueToOTFAggregator());
@@ -333,8 +326,9 @@ public class DimensionalConfigurationSchemaTest
   @Test
   public void testOTFAggregatorMap()
   {
-    DimensionalConfigurationSchema schema = new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchemaOTF.json"),
-                                                                                            AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema schema = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchemaOTF.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     Set<String> otfAggregator = Sets.newHashSet("AVG");
     Set<String> valueSet = Sets.newHashSet("impressions", "clicks", "cost", "revenue");
@@ -342,16 +336,14 @@ public class DimensionalConfigurationSchemaTest
     List<Map<String, FieldsDescriptor>> aggregatorToDescriptor = schema.getDimensionsDescriptorIDToOTFAggregatorToAggregateDescriptor();
     List<Map<String, Set<String>>> valueToAggregator = schema.getDimensionsDescriptorIDToValueToOTFAggregator();
 
-    for(int ddId = 0;
-        ddId < aggregatorToDescriptor.size();
-        ddId++) {
+    for (int ddId = 0; ddId < aggregatorToDescriptor.size(); ddId++) {
       Assert.assertEquals(otfAggregator, aggregatorToDescriptor.get(ddId).keySet());
       Assert.assertNotNull(aggregatorToDescriptor.get(ddId).get("AVG"));
 
       Assert.assertEquals(valueSet, valueToAggregator.get(ddId).keySet());
       Map<String, Set<String>> tempValueToAgg = valueToAggregator.get(ddId);
 
-      for(Map.Entry<String, Set<String>> entry: tempValueToAgg.entrySet()) {
+      for (Map.Entry<String, Set<String>> entry : tempValueToAgg.entrySet()) {
         Assert.assertEquals(otfAggregator, entry.getValue());
       }
     }
@@ -366,12 +358,11 @@ public class DimensionalConfigurationSchemaTest
 
     Assert.assertEquals(5, schema.getCustomTimeBuckets().size());
     List<CustomTimeBucket> customTimeBuckets = Lists.newArrayList(new CustomTimeBucket(TimeBucket.MINUTE),
-                                                                  new CustomTimeBucket(TimeBucket.HOUR),
-                                                                  new CustomTimeBucket(TimeBucket.DAY),
-                                                                  new CustomTimeBucket(TimeBucket.MINUTE, 5),
-                                                                  new CustomTimeBucket(TimeBucket.HOUR, 3));
-    Assert.assertEquals(customTimeBuckets,
-                        schema.getCustomTimeBuckets());
+        new CustomTimeBucket(TimeBucket.HOUR),
+        new CustomTimeBucket(TimeBucket.DAY),
+        new CustomTimeBucket(TimeBucket.MINUTE, 5),
+        new CustomTimeBucket(TimeBucket.HOUR, 3));
+    Assert.assertEquals(customTimeBuckets, schema.getCustomTimeBuckets());
 
     Assert.assertEquals(40, schema.getDimensionsDescriptorIDToKeyDescriptor().size());
 
@@ -436,8 +427,9 @@ public class DimensionalConfigurationSchemaTest
 
   public void testLoadingSchemaWithNoTimeBucket()
   {
-    DimensionalConfigurationSchema schema = new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchemaNoTime.json"),
-                                                                               AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema schema = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchemaNoTime.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
 
     Assert.assertEquals(1, schema.getTimeBuckets().size());
     Assert.assertEquals(TimeBucket.ALL, schema.getTimeBuckets().get(0));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalSchemaTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalSchemaTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalSchemaTest.java
index 50b539e..8fe740b 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalSchemaTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/DimensionalSchemaTest.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -31,8 +33,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.apex.malhar.lib.dimensions.aggregator.AggregatorRegistry;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -58,9 +58,9 @@ public class DimensionalSchemaTest
   public void noEnumsTest()
   {
     //Test if creating schema with no enums works
-    DimensionalConfigurationSchema des =
-    new DimensionalConfigurationSchema(SchemaUtils.jarResourceFileToString("adsGenericEventSchemaNoEnums.json"),
-    AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
+    DimensionalConfigurationSchema des = new DimensionalConfigurationSchema(
+        SchemaUtils.jarResourceFileToString("adsGenericEventSchemaNoEnums.json"),
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY);
   }
 
   @Test
@@ -76,20 +76,20 @@ public class DimensionalSchemaTest
 
     @SuppressWarnings("unchecked")
     List<Set<String>> dimensionCombinationsList = Lists.newArrayList((Set<String>)new HashSet<String>(),
-                                                                     Sets.newHashSet("location"),
-                                                                     Sets.newHashSet("advertiser"),
-                                                                     Sets.newHashSet("publisher"),
-                                                                     Sets.newHashSet("location", "advertiser"),
-                                                                     Sets.newHashSet("location", "publisher"),
-                                                                     Sets.newHashSet("advertiser", "publisher"),
-                                                                     Sets.newHashSet("location", "advertiser", "publisher"));
+        Sets.newHashSet("location"),
+        Sets.newHashSet("advertiser"),
+        Sets.newHashSet("publisher"),
+        Sets.newHashSet("location", "advertiser"),
+        Sets.newHashSet("location", "publisher"),
+        Sets.newHashSet("advertiser", "publisher"),
+        Sets.newHashSet("location", "advertiser", "publisher"));
 
     basicSchemaChecker(resultSchema,
-                       Lists.newArrayList(TimeBucket.ALL.getText()),
-                       Lists.newArrayList("publisher", "advertiser", "location"),
-                       Lists.newArrayList("string", "string", "string"),
-                       valueToType,
-                       dimensionCombinationsList);
+        Lists.newArrayList(TimeBucket.ALL.getText()),
+        Lists.newArrayList("publisher", "advertiser", "location"),
+        Lists.newArrayList("string", "string", "string"),
+        valueToType,
+        dimensionCombinationsList);
   }
 
   @Test
@@ -108,21 +108,16 @@ public class DimensionalSchemaTest
     valueToType.put("revenue:SUM", "double");
 
     @SuppressWarnings("unchecked")
-    List<Set<String>> dimensionCombinationsList = Lists.newArrayList((Set<String>) new HashSet<String>(),
-                                                                     Sets.newHashSet("location"),
-                                                                     Sets.newHashSet("advertiser"),
-                                                                     Sets.newHashSet("publisher"),
-                                                                     Sets.newHashSet("location", "advertiser"),
-                                                                     Sets.newHashSet("location", "publisher"),
-                                                                     Sets.newHashSet("advertiser", "publisher"),
-                                                                     Sets.newHashSet("location", "advertiser", "publisher"));
-
-    basicSchemaChecker(resultSchema,
-                       timeBuckets,
-                       keyNames,
-                       keyTypes,
-                       valueToType,
-                       dimensionCombinationsList);
+    List<Set<String>> dimensionCombinationsList = Lists.newArrayList((Set<String>)new HashSet<String>(),
+        Sets.newHashSet("location"),
+        Sets.newHashSet("advertiser"),
+        Sets.newHashSet("publisher"),
+        Sets.newHashSet("location", "advertiser"),
+        Sets.newHashSet("location", "publisher"),
+        Sets.newHashSet("advertiser", "publisher"),
+        Sets.newHashSet("location", "advertiser", "publisher"));
+
+    basicSchemaChecker(resultSchema, timeBuckets, keyNames, keyTypes, valueToType, dimensionCombinationsList);
   }
 
   @Test
@@ -145,21 +140,16 @@ public class DimensionalSchemaTest
     valueToType.put("revenue:COUNT", "long");
 
     @SuppressWarnings("unchecked")
-    List<Set<String>> dimensionCombinationsList = Lists.newArrayList((Set<String>) new HashSet<String>(),
-                                                                     Sets.newHashSet("location"),
-                                                                     Sets.newHashSet("advertiser"),
-                                                                     Sets.newHashSet("publisher"),
-                                                                     Sets.newHashSet("location", "advertiser"),
-                                                                     Sets.newHashSet("location", "publisher"),
-                                                                     Sets.newHashSet("advertiser", "publisher"),
-                                                                     Sets.newHashSet("location", "advertiser", "publisher"));
+    List<Set<String>> dimensionCombinationsList = Lists.newArrayList((Set<String>)new HashSet<String>(),
+        Sets.newHashSet("location"),
+        Sets.newHashSet("advertiser"),
+        Sets.newHashSet("publisher"),
+        Sets.newHashSet("location", "advertiser"),
+        Sets.newHashSet("location", "publisher"),
+        Sets.newHashSet("advertiser", "publisher"),
+        Sets.newHashSet("location", "advertiser", "publisher"));
 
-    basicSchemaChecker(resultSchema,
-                       timeBuckets,
-                       keyNames,
-                       keyTypes,
-                       valueToType,
-                       dimensionCombinationsList);
+    basicSchemaChecker(resultSchema, timeBuckets, keyNames, keyTypes, valueToType, dimensionCombinationsList);
 
     Map<String, String> additionalValueMap = Maps.newHashMap();
     additionalValueMap.put("impressions:MAX", "long");
@@ -172,21 +162,14 @@ public class DimensionalSchemaTest
     additionalValueMap.put("revenue:MIN", "double");
 
     @SuppressWarnings("unchecked")
-    List<Map<String, String>> additionalValuesList = Lists.newArrayList((Map<String, String>) new HashMap<String, String>(),
-                                                                (Map<String, String>) new HashMap<String, String>(),
-                                                                additionalValueMap,
-                                                                additionalValueMap,
-                                                                (Map<String, String>) new HashMap<String, String>(),
-                                                                (Map<String, String>) new HashMap<String, String>(),
-                                                                (Map<String, String>) new HashMap<String, String>(),
-                                                                (Map<String, String>) new HashMap<String, String>());
+    List<Map<String, String>> additionalValuesList = Lists.newArrayList(new HashMap<String, String>(),
+        new HashMap<String, String>(), additionalValueMap, additionalValueMap, new HashMap<String, String>(),
+        new HashMap<String, String>(), new HashMap<String, String>(), new HashMap<String, String>());
 
     JSONObject data = new JSONObject(resultSchema).getJSONArray("data").getJSONObject(0);
     JSONArray dimensions = data.getJSONArray("dimensions");
 
-    for(int index = 0;
-        index < dimensions.length();
-        index++) {
+    for (int index = 0; index < dimensions.length(); index++) {
       JSONObject combination = dimensions.getJSONObject(index);
 
       Map<String, String> tempAdditionalValueMap = additionalValuesList.get(index);
@@ -194,7 +177,7 @@ public class DimensionalSchemaTest
 
       Set<String> additionalValueSet = Sets.newHashSet();
 
-      if(tempAdditionalValueMap.isEmpty()) {
+      if (tempAdditionalValueMap.isEmpty()) {
         continue;
       }
 
@@ -202,9 +185,7 @@ public class DimensionalSchemaTest
 
       LOG.debug("additionalValues {}", additionalValues);
 
-      for(int aIndex = 0;
-          aIndex < additionalValues.length();
-          aIndex++) {
+      for (int aIndex = 0; aIndex < additionalValues.length(); aIndex++) {
         JSONObject additionalValue = additionalValues.getJSONObject(aIndex);
 
         String valueName = additionalValue.getString("name");
@@ -223,17 +204,16 @@ public class DimensionalSchemaTest
   public void enumValUpdateTest() throws Exception
   {
     String eventSchemaJSON = SchemaUtils.jarResourceFileToString("adsGenericEventSchema.json");
-    DimensionalSchema dimensional = new DimensionalSchema(
-                                    new DimensionalConfigurationSchema(eventSchemaJSON,
-                                                               AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
+    DimensionalSchema dimensional = new DimensionalSchema(new DimensionalConfigurationSchema(eventSchemaJSON,
+        AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
 
     Map<String, List<Object>> replacementEnums = Maps.newHashMap();
     @SuppressWarnings("unchecked")
-    List<Object> publisherEnumList = ((List<Object>) ((List) Lists.newArrayList("google", "twitter")));
+    List<Object> publisherEnumList = ((List<Object>)((List)Lists.newArrayList("google", "twitter")));
     @SuppressWarnings("unchecked")
-    List<Object> advertiserEnumList = ((List<Object>) ((List) Lists.newArrayList("google", "twitter")));
+    List<Object> advertiserEnumList = ((List<Object>)((List)Lists.newArrayList("google", "twitter")));
     @SuppressWarnings("unchecked")
-    List<Object> locationEnumList = ((List<Object>) ((List) Lists.newArrayList("google", "twitter")));
+    List<Object> locationEnumList = ((List<Object>)((List)Lists.newArrayList("google", "twitter")));
 
     replacementEnums.put("publisher", publisherEnumList);
     replacementEnums.put("advertiser", advertiserEnumList);
@@ -248,17 +228,13 @@ public class DimensionalSchemaTest
 
     Map<String, List<Object>> newEnums = Maps.newHashMap();
 
-    for(int keyIndex = 0;
-        keyIndex < keys.length();
-        keyIndex++) {
+    for (int keyIndex = 0; keyIndex < keys.length(); keyIndex++) {
       JSONObject keyData = keys.getJSONObject(keyIndex);
       String name = keyData.getString(DimensionalConfigurationSchema.FIELD_KEYS_NAME);
       JSONArray enumValues = keyData.getJSONArray(DimensionalConfigurationSchema.FIELD_KEYS_ENUMVALUES);
       List<Object> enumList = Lists.newArrayList();
 
-      for(int enumIndex = 0;
-          enumIndex < enumValues.length();
-          enumIndex++) {
+      for (int enumIndex = 0; enumIndex < enumValues.length(); enumIndex++) {
         enumList.add(enumValues.get(enumIndex));
       }
 
@@ -274,16 +250,15 @@ public class DimensionalSchemaTest
   {
     String eventSchemaJSON = SchemaUtils.jarResourceFileToString("adsGenericEventSchema.json");
     DimensionalSchema dimensional = new DimensionalSchema(
-                                    new DimensionalConfigurationSchema(eventSchemaJSON,
-                                                               AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
+        new DimensionalConfigurationSchema(eventSchemaJSON, AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
 
     Map<String, Set<Comparable>> replacementEnums = Maps.newHashMap();
     @SuppressWarnings("unchecked")
-    Set<Comparable> publisherEnumList = ((Set<Comparable>) ((Set) Sets.newHashSet("b", "c", "a")));
+    Set<Comparable> publisherEnumList = ((Set<Comparable>)((Set)Sets.newHashSet("b", "c", "a")));
     @SuppressWarnings("unchecked")
-    Set<Comparable> advertiserEnumList = ((Set<Comparable>) ((Set) Sets.newHashSet("b", "c", "a")));
+    Set<Comparable> advertiserEnumList = ((Set<Comparable>)((Set)Sets.newHashSet("b", "c", "a")));
     @SuppressWarnings("unchecked")
-    Set<Comparable> locationEnumList = ((Set<Comparable>) ((Set) Sets.newHashSet("b", "c", "a")));
+    Set<Comparable> locationEnumList = ((Set<Comparable>)((Set)Sets.newHashSet("b", "c", "a")));
 
     replacementEnums.put("publisher", publisherEnumList);
     replacementEnums.put("advertiser", advertiserEnumList);
@@ -291,11 +266,11 @@ public class DimensionalSchemaTest
 
     Map<String, List<Comparable>> expectedOutput = Maps.newHashMap();
     @SuppressWarnings("unchecked")
-    List<Comparable> publisherEnumSortedList = (List<Comparable>) ((List) Lists.newArrayList("a", "b", "c"));
+    List<Comparable> publisherEnumSortedList = (List<Comparable>)((List)Lists.newArrayList("a", "b", "c"));
     @SuppressWarnings("unchecked")
-    List<Comparable> advertiserEnumSortedList = (List<Comparable>) ((List) Lists.newArrayList("a", "b", "c"));
+    List<Comparable> advertiserEnumSortedList = (List<Comparable>)((List)Lists.newArrayList("a", "b", "c"));
     @SuppressWarnings("unchecked")
-    List<Comparable> locationEnumSortedList = (List<Comparable>) ((List) Lists.newArrayList("a", "b", "c"));
+    List<Comparable> locationEnumSortedList = (List<Comparable>)((List)Lists.newArrayList("a", "b", "c"));
 
     expectedOutput.put("publisher", publisherEnumSortedList);
     expectedOutput.put("advertiser", advertiserEnumSortedList);
@@ -310,20 +285,15 @@ public class DimensionalSchemaTest
 
     Map<String, List<Comparable>> newEnums = Maps.newHashMap();
 
-    for(int keyIndex = 0;
-        keyIndex < keys.length();
-        keyIndex++) {
+    for (int keyIndex = 0; keyIndex < keys.length(); keyIndex++) {
       JSONObject keyData = keys.getJSONObject(keyIndex);
       String name = keyData.getString(DimensionalConfigurationSchema.FIELD_KEYS_NAME);
       JSONArray enumValues = keyData.getJSONArray(DimensionalConfigurationSchema.FIELD_KEYS_ENUMVALUES);
       List<Comparable> enumList = Lists.newArrayList();
 
-      for(int enumIndex = 0;
-          enumIndex < enumValues.length();
-          enumIndex++) {
-        enumList.add((Comparable) enumValues.get(enumIndex));
+      for (int enumIndex = 0; enumIndex < enumValues.length(); enumIndex++) {
+        enumList.add((Comparable)enumValues.get(enumIndex));
       }
-
       newEnums.put(name, enumList);
     }
 
@@ -340,8 +310,7 @@ public class DimensionalSchemaTest
 
     String eventSchemaJSON = SchemaUtils.jarResourceFileToString("adsGenericEventSchemaTags.json");
     DimensionalSchema dimensional = new DimensionalSchema(
-      new DimensionalConfigurationSchema(eventSchemaJSON,
-                                         AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
+        new DimensionalConfigurationSchema(eventSchemaJSON, AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
 
     String schemaJSON = dimensional.getSchemaJSON();
 
@@ -404,8 +373,8 @@ public class DimensionalSchemaTest
     String eventSchemaJSON = SchemaUtils.jarResourceFileToString(resourceName);
 
     MessageSerializerFactory dsf = new MessageSerializerFactory(new ResultFormatter());
-    DimensionalSchema schemaDimensional = new DimensionalSchema(new DimensionalConfigurationSchema(eventSchemaJSON,
-                                                                                           AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
+    DimensionalSchema schemaDimensional = new DimensionalSchema(
+        new DimensionalConfigurationSchema(eventSchemaJSON, AggregatorRegistry.DEFAULT_AGGREGATOR_REGISTRY));
 
     SchemaQuery schemaQuery = new SchemaQuery("1");
 
@@ -424,12 +393,9 @@ public class DimensionalSchemaTest
     return stringsArray;
   }
 
-  private void basicSchemaChecker(String resultSchema,
-                                  List<String> timeBuckets,
-                                  List<String> keyNames,
-                                  List<String> keyTypes,
-                                  Map<String, String> valueToType,
-                                  List<Set<String>> dimensionCombinationsList) throws Exception
+  private void basicSchemaChecker(String resultSchema, List<String> timeBuckets, List<String> keyNames,
+      List<String> keyTypes, Map<String, String> valueToType, List<Set<String>> dimensionCombinationsList)
+      throws Exception
   {
     LOG.debug("Schema to check {}", resultSchema);
     JSONObject schemaJO = new JSONObject(resultSchema);
@@ -439,17 +405,13 @@ public class DimensionalSchemaTest
 
     Assert.assertEquals(timeBuckets.size(), jaBuckets.length());
 
-    for(int index = 0;
-        index < jaBuckets.length();
-        index++) {
+    for (int index = 0; index < jaBuckets.length(); index++) {
       Assert.assertEquals(timeBuckets.get(index), jaBuckets.get(index));
     }
 
     JSONArray keys = data.getJSONArray("keys");
 
-    for(int index = 0;
-        index < keys.length();
-        index++) {
+    for (int index = 0; index < keys.length(); index++) {
       JSONObject keyJO = keys.getJSONObject(index);
 
       Assert.assertEquals(keyNames.get(index), keyJO.get("name"));
@@ -463,9 +425,7 @@ public class DimensionalSchemaTest
 
     Set<String> valueNames = Sets.newHashSet();
 
-    for(int index = 0;
-        index < valuesArray.length();
-        index++) {
+    for (int index = 0; index < valuesArray.length(); index++) {
       JSONObject valueJO = valuesArray.getJSONObject(index);
 
       String valueName = valueJO.getString("name");
@@ -481,17 +441,13 @@ public class DimensionalSchemaTest
 
     JSONArray dimensions = data.getJSONArray("dimensions");
 
-    for(int index = 0;
-        index < dimensions.length();
-        index++) {
+    for (int index = 0; index < dimensions.length(); index++) {
       JSONObject combination = dimensions.getJSONObject(index);
       JSONArray dimensionsCombinationArray = combination.getJSONArray("combination");
 
       Set<String> dimensionCombination = Sets.newHashSet();
 
-      for(int dimensionIndex = 0;
-          dimensionIndex < dimensionsCombinationArray.length();
-          dimensionIndex++) {
+      for (int dimensionIndex = 0; dimensionIndex < dimensionsCombinationArray.length(); dimensionIndex++) {
         dimensionCombination.add(dimensionsCombinationArray.getString(dimensionIndex));
       }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptorTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptorTest.java
index 53a3f96..843fb8d 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/FieldsDescriptorTest.java
@@ -23,13 +23,13 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.junit.Assert;
+import org.junit.Test;
+
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import org.junit.Assert;
-import org.junit.Test;
-
 public class FieldsDescriptorTest
 {
   @Test
@@ -59,15 +59,15 @@ public class FieldsDescriptorTest
     fieldToType.put(doubleField, Type.DOUBLE);
 
     final List<String> expectedFieldList = Lists.newArrayList(boolField,
-                                                              charField,
-                                                              stringField,
-                                                              objectField,
-                                                              byteField,
-                                                              shortField,
-                                                              integerField,
-                                                              longField,
-                                                              floatField,
-                                                              doubleField);
+        charField,
+        stringField,
+        objectField,
+        byteField,
+        shortField,
+        integerField,
+        longField,
+        floatField,
+        doubleField);
 
     final Fields expectedFields = new Fields(Sets.newHashSet(expectedFieldList));
     final Set<Type> expectedTypes = Sets.newHashSet(fieldToType.values());
@@ -80,10 +80,8 @@ public class FieldsDescriptorTest
     Assert.assertEquals(Sets.newHashSet(), fd.getCompressedTypes());
     Assert.assertEquals(expectedFields, fd.getFields());
     Assert.assertEquals(fieldToType, fd.getFieldToType());
-    Assert.assertTrue(expectedTypes.containsAll(fd.getTypes()) &&
-                      fd.getTypes().containsAll(expectedTypes));
-    Assert.assertTrue(fd.getTypesList().containsAll(expectedTypes) &&
-                      expectedTypes.containsAll(fd.getTypesList()));
+    Assert.assertTrue(expectedTypes.containsAll(fd.getTypes()) && fd.getTypes().containsAll(expectedTypes));
+    Assert.assertTrue(fd.getTypesList().containsAll(expectedTypes) && expectedTypes.containsAll(fd.getTypesList()));
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/ResultFormatterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/ResultFormatterTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/ResultFormatterTest.java
index 5f2d924..10a5e56 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/ResultFormatterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/ResultFormatterTest.java
@@ -53,10 +53,10 @@ public class ResultFormatterTest
 
     final String expectedString = "1.00";
 
-    Assert.assertEquals(expectedString, adf.format((byte) 1));
-    Assert.assertEquals(expectedString, adf.format((short) 1));
+    Assert.assertEquals(expectedString, adf.format((byte)1));
+    Assert.assertEquals(expectedString, adf.format((short)1));
     Assert.assertEquals(expectedString, adf.format(1));
-    Assert.assertEquals(expectedString, adf.format((long) 1));
+    Assert.assertEquals(expectedString, adf.format((long)1));
   }
 
   @Test
@@ -76,14 +76,14 @@ public class ResultFormatterTest
 
     final String discreteString = "1";
 
-    Assert.assertEquals(discreteString, adf.format((byte) 1));
-    Assert.assertEquals(discreteString, adf.format((short) 1));
+    Assert.assertEquals(discreteString, adf.format((byte)1));
+    Assert.assertEquals(discreteString, adf.format((short)1));
     Assert.assertEquals(discreteString, adf.format(1));
-    Assert.assertEquals(discreteString, adf.format((long) 1));
+    Assert.assertEquals(discreteString, adf.format((long)1));
 
     final String continuousString = "1.0";
 
-    Assert.assertEquals(continuousString, adf.format((float) 1));
-    Assert.assertEquals(continuousString, adf.format((double) 1));
+    Assert.assertEquals(continuousString, adf.format((float)1));
+    Assert.assertEquals(continuousString, adf.format((double)1));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaQueryTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaQueryTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaQueryTest.java
index f0263d8..307cab0 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaQueryTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaQueryTest.java
@@ -20,11 +20,11 @@ package com.datatorrent.lib.appdata.schemas;
 
 import java.util.Map;
 
-import com.google.common.collect.Maps;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.query.serde.MessageDeserializerFactory;
 
 public class SchemaQueryTest
@@ -34,14 +34,14 @@ public class SchemaQueryTest
   {
     final String id = "12345";
     final String schemaQueryJSON = "{" +
-                                      "\"id\":\"" + id + "\"," +
-                                      "\"type\":\"" + SchemaQuery.TYPE + "\"" +
-                                    "}";
+        "\"id\":\"" + id + "\"," +
+        "\"type\":\"" + SchemaQuery.TYPE + "\"" +
+        "}";
 
     @SuppressWarnings("unchecked")
     MessageDeserializerFactory qb = new MessageDeserializerFactory(SchemaQuery.class);
 
-    SchemaQuery schemaQuery = (SchemaQuery) qb.deserialize(schemaQueryJSON);
+    SchemaQuery schemaQuery = (SchemaQuery)qb.deserialize(schemaQueryJSON);
 
     Assert.assertEquals("Id's must match", id, schemaQuery.getId());
     Assert.assertEquals("Types must match", SchemaQuery.TYPE, schemaQuery.getType());
@@ -57,17 +57,17 @@ public class SchemaQueryTest
 
     final String id = "12345";
     final String schemaQueryJSON = "{" +
-                                      "\"id\":\"" + id + "\"," +
-                                      "\"type\":\"" + SchemaQuery.TYPE + "\"," +
-                                      "\"context\":{" +
-                                      "\"schemaKeys\":" +
-                                      "{\"publisher\":\"google\",\"advertiser\":\"microsoft\",\"location\":\"CA\"}" +
-                                   "}}";
+        "\"id\":\"" + id + "\"," +
+        "\"type\":\"" + SchemaQuery.TYPE + "\"," +
+        "\"context\":{" +
+        "\"schemaKeys\":" +
+        "{\"publisher\":\"google\",\"advertiser\":\"microsoft\",\"location\":\"CA\"}" +
+        "}}";
 
     @SuppressWarnings("unchecked")
     MessageDeserializerFactory qb = new MessageDeserializerFactory(SchemaQuery.class);
 
-    SchemaQuery schemaQuery = (SchemaQuery) qb.deserialize(schemaQueryJSON);
+    SchemaQuery schemaQuery = (SchemaQuery)qb.deserialize(schemaQueryJSON);
 
     Assert.assertEquals("Id's must match", id, schemaQuery.getId());
     Assert.assertEquals("Types must match", SchemaQuery.TYPE, schemaQuery.getType());
@@ -88,18 +88,18 @@ public class SchemaQueryTest
 
     final String id = "12345";
     final String schemaQueryJSON = "{" +
-                                      "\"id\":\"" + id + "\"," +
-                                      "\"type\":\"" + SchemaQuery.TYPE + "\"," +
-                                      "\"context\":{" +
-                                      "\"schemaKeys\":" +
-                                      "{\"publisher\":\"google\",\"advertiser\":\"microsoft\",\"location\":\"CA\"}," +
-                                      "\"keys\":{\"publisher\":\"google\",\"advertiser\":\"microsoft\"}" +
-                                   "}}";
+        "\"id\":\"" + id + "\"," +
+        "\"type\":\"" + SchemaQuery.TYPE + "\"," +
+        "\"context\":{" +
+        "\"schemaKeys\":" +
+        "{\"publisher\":\"google\",\"advertiser\":\"microsoft\",\"location\":\"CA\"}," +
+        "\"keys\":{\"publisher\":\"google\",\"advertiser\":\"microsoft\"}" +
+        "}}";
 
     @SuppressWarnings("unchecked")
     MessageDeserializerFactory qb = new MessageDeserializerFactory(SchemaQuery.class);
 
-    SchemaQuery schemaQuery = (SchemaQuery) qb.deserialize(schemaQueryJSON);
+    SchemaQuery schemaQuery = (SchemaQuery)qb.deserialize(schemaQueryJSON);
 
     Assert.assertEquals("Id's must match", id, schemaQuery.getId());
     Assert.assertEquals("Types must match", SchemaQuery.TYPE, schemaQuery.getType());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultipleTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultipleTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultipleTest.java
index 0513079..5504dda 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultipleTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaRegistryMultipleTest.java
@@ -21,12 +21,12 @@ package com.datatorrent.lib.appdata.schemas;
 import java.util.Collections;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.util.KryoCloneUtils;
 
 public class SchemaRegistryMultipleTest
@@ -70,15 +70,13 @@ public class SchemaRegistryMultipleTest
     Assert.assertEquals(SCHEMA_SALES_KEYS, tempSalesSchema.getSchemaKeys());
 
     //Query schema for ads
-    SchemaQuery schemaQueryAds = new SchemaQuery(id,
-                                                 SCHEMA_ADS_KEYS);
+    SchemaQuery schemaQueryAds = new SchemaQuery(id, SCHEMA_ADS_KEYS);
     SchemaResult result = registry.getSchemaResult(schemaQueryAds);
     Assert.assertEquals(1, result.getGenericSchemas().length);
     Assert.assertEquals(SCHEMA_ADS_KEYS, result.getGenericSchemas()[0].getSchemaKeys());
 
     //Query schema for sales
-    SchemaQuery schemaQuerySales = new SchemaQuery(id,
-                                                   SCHEMA_SALES_KEYS);
+    SchemaQuery schemaQuerySales = new SchemaQuery(id, SCHEMA_SALES_KEYS);
     result = registry.getSchemaResult(schemaQuerySales);
     Assert.assertEquals(1, result.getGenericSchemas().length);
     Assert.assertEquals(SCHEMA_SALES_KEYS, result.getGenericSchemas()[0].getSchemaKeys());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaTestUtils.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaTestUtils.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaTestUtils.java
index 2c42879..26320ca 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaTestUtils.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaTestUtils.java
@@ -28,9 +28,7 @@ public class SchemaTestUtils
   {
     String[] result = new String[strings.length];
 
-    for(int sc = 0;
-        sc < strings.length;
-        sc++) {
+    for (int sc = 0; sc < strings.length; sc++) {
       result[sc] = left + strings[sc] + right;
     }
 
@@ -41,9 +39,7 @@ public class SchemaTestUtils
   {
     String[] result = new String[strings.length];
 
-    for(int sc = 0;
-        sc < strings.length;
-        sc++) {
+    for (int sc = 0; sc < strings.length; sc++) {
       result[sc] = ws + strings[sc] + ws;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaUtilsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaUtilsTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaUtilsTest.java
index 3abee0b..5ae712f 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaUtilsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SchemaUtilsTest.java
@@ -18,17 +18,17 @@
  */
 package com.datatorrent.lib.appdata.schemas;
 
-import com.google.common.collect.Lists;
-import java.util.Map;
-
-import com.google.common.collect.Maps;
 import java.util.List;
+import java.util.Map;
 
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 public class SchemaUtilsTest
 {
   @Test

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/schemas/SnapshotSchemaTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SnapshotSchemaTest.java b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SnapshotSchemaTest.java
index b07c7e0..5fc5f4b 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/schemas/SnapshotSchemaTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/schemas/SnapshotSchemaTest.java
@@ -20,19 +20,19 @@ package com.datatorrent.lib.appdata.schemas;
 
 import java.util.Map;
 
-import com.google.common.collect.Maps;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Maps;
+
 public class SnapshotSchemaTest
 {
   public static final String TEST_JSON = "{\n"
-                                        + " \"values\": [\n"
-                                        + "   {\"name\": \"url\", \"type\":\"string\"},\n"
-                                        + "   {\"name\": \"count\", \"type\":\"integer\"}\n"
-                                        + " ]\n"
-                                        + "}";
+      + " \"values\": [\n"
+      + "   {\"name\": \"url\", \"type\":\"string\"},\n"
+      + "   {\"name\": \"count\", \"type\":\"integer\"}\n"
+      + " ]\n"
+      + "}";
 
   @Test
   public void schemaSnapshotFieldTypeTest()

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMapTest.java b/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMapTest.java
index 96b348c..86ccb92 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerMapTest.java
@@ -21,32 +21,36 @@ package com.datatorrent.lib.appdata.snapshot;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 import org.codehaus.jettison.json.JSONObject;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.KryoCloneUtils;
 
 public class AppDataSnapshotServerMapTest
 {
-  public static final String SIMPLE_RESULT = "{\"id\":\"1\",\"type\":\"dataQuery\",\"data\":[{\"count\":\"2\",\"word\":\"a\"},{\"count\":\"3\",\"word\":\"b\"}],\"countdown\":10}";
+  public static final String SIMPLE_RESULT = "{\"id\":\"1\"," +
+      "\"type\":\"dataQuery\",\"data\":[{\"count\":\"2\",\"word\":\"a\"},{\"count\":\"3\"," +
+      "\"word\":\"b\"}],\"countdown\":10}";
+
   public static final String SIMPLE_QUERY = "{\"id\": \"1\",\n"
-                                            + "\"type\": \"dataQuery\",\n"
-                                            + "\"data\": {\n"
-                                            + "\"fields\": [ \"word\", \"count\" ]\n"
-                                            + "},\n"
-                                            + "\"countdown\":10\n"
-                                            + "}";
+      + "\"type\": \"dataQuery\",\n"
+      + "\"data\": {\n"
+      + "\"fields\": [ \"word\", \"count\" ]\n"
+      + "},\n"
+      + "\"countdown\":10\n"
+      + "}";
+
   public static final String SIMPLE_SCHEMA = "{\n"
-                                             + "  \"values\": [{\"name\": \"word\", \"type\": \"string\"},\n"
-                                             + "             {\"name\": \"count\", \"type\": \"integer\"}]\n"
-                                             + "}";
+      + "  \"values\": [{\"name\": \"word\", \"type\": \"string\"},\n"
+      + "             {\"name\": \"count\", \"type\": \"integer\"}]\n"
+      + "}";
 
   @Test
   public void simpleTest() throws Exception
@@ -73,7 +77,7 @@ public class AppDataSnapshotServerMapTest
 
     CollectorTestSink<String> resultSink = new CollectorTestSink<String>();
     @SuppressWarnings({"unchecked", "rawtypes"})
-    CollectorTestSink<Object> tempResultSink = (CollectorTestSink) resultSink;
+    CollectorTestSink<Object> tempResultSink = (CollectorTestSink)resultSink;
     snapshotServer.queryResult.setSink(tempResultSink);
 
     snapshotServer.setup(null);
@@ -86,7 +90,7 @@ public class AppDataSnapshotServerMapTest
     snapshotServer.query.put(SIMPLE_QUERY);
     snapshotServer.endWindow();
 
-    String result = (String) tempResultSink.collectedTuples.get(0);
+    String result = (String)tempResultSink.collectedTuples.get(0);
 
     Assert.assertEquals("Should get only 1 result back", 1, tempResultSink.collectedTuples.size());
     Assert.assertEquals("Countdown incorrect", 10, new JSONObject(result).getInt("countdown"));

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPojoTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPojoTest.java b/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPojoTest.java
index a6cbf2b..6e8b0da 100644
--- a/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPojoTest.java
+++ b/library/src/test/java/com/datatorrent/lib/appdata/snapshot/AppDataSnapshotServerPojoTest.java
@@ -21,15 +21,15 @@ package com.datatorrent.lib.appdata.snapshot;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
 import org.codehaus.jettison.json.JSONObject;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import com.datatorrent.lib.appdata.schemas.DataQuerySnapshot;
 import com.datatorrent.lib.appdata.schemas.SchemaUtils;
 import com.datatorrent.lib.testbench.CollectorTestSink;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/codec/JavaSerializationStreamCodecTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/codec/JavaSerializationStreamCodecTest.java b/library/src/test/java/com/datatorrent/lib/codec/JavaSerializationStreamCodecTest.java
index be432c8..a67e116 100644
--- a/library/src/test/java/com/datatorrent/lib/codec/JavaSerializationStreamCodecTest.java
+++ b/library/src/test/java/com/datatorrent/lib/codec/JavaSerializationStreamCodecTest.java
@@ -31,102 +31,102 @@ import com.datatorrent.netlet.util.Slice;
  */
 public class JavaSerializationStreamCodecTest
 {
-	static class TestClass implements Serializable
-	{
-		private static final long serialVersionUID = 201301081743L;
-		final String s;
-		final int i;
-
-		TestClass(String s, int i)
-		{
-			this.s = s;
-			this.i = i;
-		}
-
-		TestClass()
-		{
-			s = "default!";
-			i = Integer.MAX_VALUE;
-		}
-
-		@Override
-		public int hashCode()
-		{
-			int hash = 7;
-			hash = 97 * hash + (this.s != null ? this.s.hashCode() : 0);
-			hash = 97 * hash + this.i;
-			return hash;
-		}
-
-		@Override
-		public boolean equals(Object obj)
-		{
-			if (obj == null) {
-				return false;
-			}
-			if (getClass() != obj.getClass()) {
-				return false;
-			}
-			final TestClass other = (TestClass) obj;
-			if ((this.s == null) ? (other.s != null) : !this.s.equals(other.s)) {
-				return false;
-			}
-			if (this.i != other.i) {
-				return false;
-			}
-			return true;
-		}
-
-	}
-
-	@Test
-	public void testSomeMethod() throws IOException
-	{
-		JavaSerializationStreamCodec<Serializable> coder = new JavaSerializationStreamCodec<Serializable>();
-		JavaSerializationStreamCodec<Serializable> decoder = new JavaSerializationStreamCodec<Serializable>();
-
-		TestClass tc = new TestClass("hello!", 42);
-
-		Slice dsp1 = coder.toByteArray(tc);
-		Slice dsp2 = coder.toByteArray(tc);
-		Assert.assertEquals(dsp1, dsp2);
-
-		Object tcObject1 = decoder.fromByteArray(dsp1);
-		assert (tc.equals(tcObject1));
-
-		Object tcObject2 = decoder.fromByteArray(dsp2);
-		assert (tc.equals(tcObject2));
-
-		dsp1 = coder.toByteArray(tc);
-		dsp2 = coder.toByteArray(tc);
-		Assert.assertEquals(dsp1, dsp2);
-	}
-
-	public static class TestTuple implements Serializable
-	{
-		private static final long serialVersionUID = 201301081744L;
-		final Integer finalField;
-
-		@SuppressWarnings("unused")
-		private TestTuple()
-		{
-			finalField = null;
-		}
-
-		public TestTuple(Integer i)
-		{
-			this.finalField = i;
-		}
-
-	}
-
-	@Test
-	public void testFinalFieldSerialization() throws Exception
-	{
-		TestTuple t1 = new TestTuple(5);
-		JavaSerializationStreamCodec<Serializable> c = new JavaSerializationStreamCodec<Serializable>();
-		Slice dsp = c.toByteArray(t1);
-		TestTuple t2 = (TestTuple) c.fromByteArray(dsp);
-		Assert.assertEquals("", t1.finalField, t2.finalField);
-	}
+  static class TestClass implements Serializable
+  {
+    private static final long serialVersionUID = 201301081743L;
+    final String s;
+    final int i;
+
+    TestClass(String s, int i)
+    {
+      this.s = s;
+      this.i = i;
+    }
+
+    TestClass()
+    {
+      s = "default!";
+      i = Integer.MAX_VALUE;
+    }
+
+    @Override
+    public int hashCode()
+    {
+      int hash = 7;
+      hash = 97 * hash + (this.s != null ? this.s.hashCode() : 0);
+      hash = 97 * hash + this.i;
+      return hash;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      final TestClass other = (TestClass)obj;
+      if ((this.s == null) ? (other.s != null) : !this.s.equals(other.s)) {
+        return false;
+      }
+      if (this.i != other.i) {
+        return false;
+      }
+      return true;
+    }
+
+  }
+
+  @Test
+  public void testSomeMethod() throws IOException
+  {
+    JavaSerializationStreamCodec<Serializable> coder = new JavaSerializationStreamCodec<Serializable>();
+    JavaSerializationStreamCodec<Serializable> decoder = new JavaSerializationStreamCodec<Serializable>();
+
+    TestClass tc = new TestClass("hello!", 42);
+
+    Slice dsp1 = coder.toByteArray(tc);
+    Slice dsp2 = coder.toByteArray(tc);
+    Assert.assertEquals(dsp1, dsp2);
+
+    Object tcObject1 = decoder.fromByteArray(dsp1);
+    assert (tc.equals(tcObject1));
+
+    Object tcObject2 = decoder.fromByteArray(dsp2);
+    assert (tc.equals(tcObject2));
+
+    dsp1 = coder.toByteArray(tc);
+    dsp2 = coder.toByteArray(tc);
+    Assert.assertEquals(dsp1, dsp2);
+  }
+
+  public static class TestTuple implements Serializable
+  {
+    private static final long serialVersionUID = 201301081744L;
+    final Integer finalField;
+
+    @SuppressWarnings("unused")
+    private TestTuple()
+    {
+      finalField = null;
+    }
+
+    public TestTuple(Integer i)
+    {
+      this.finalField = i;
+    }
+
+  }
+
+  @Test
+  public void testFinalFieldSerialization() throws Exception
+  {
+    TestTuple t1 = new TestTuple(5);
+    JavaSerializationStreamCodec<Serializable> c = new JavaSerializationStreamCodec<Serializable>();
+    Slice dsp = c.toByteArray(t1);
+    TestTuple t2 = (TestTuple)c.fromByteArray(dsp);
+    Assert.assertEquals("", t1.finalField, t2.finalField);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/converter/ByteArrayToStringConverterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/converter/ByteArrayToStringConverterTest.java b/library/src/test/java/com/datatorrent/lib/converter/ByteArrayToStringConverterTest.java
index 99ae5b7..12f5d83 100644
--- a/library/src/test/java/com/datatorrent/lib/converter/ByteArrayToStringConverterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/converter/ByteArrayToStringConverterTest.java
@@ -18,14 +18,16 @@
  */
 package com.datatorrent.lib.converter;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
-import com.datatorrent.lib.util.TestUtils;
 import java.io.UnsupportedEncodingException;
+
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.lib.testbench.CollectorTestSink;
+import com.datatorrent.lib.util.TestUtils;
+
 public class ByteArrayToStringConverterTest
 {
   @Test
@@ -69,5 +71,6 @@ public class ByteArrayToStringConverterTest
     Assert.assertEquals(test1, testsink.collectedTuples.get(0));
 
   }
+
   private static final Logger logger = LoggerFactory.getLogger(ByteArrayToStringConverterTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java b/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
index bbd5386..dd19d2b 100644
--- a/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
@@ -27,8 +27,8 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.KeyValPair;
 import com.datatorrent.lib.util.TestUtils;
 
-public class MapToKeyValuePairConverterTest {
-
+public class MapToKeyValuePairConverterTest
+{
   @Test
   public void MapToKeyValuePairConversion() 
   {
@@ -37,10 +37,9 @@ public class MapToKeyValuePairConverterTest {
     String[] keys = {"a", "b", "c"};
     
     HashMap<String, Integer> inputMap = new HashMap<String, Integer>();
-    
-    for(int i =0 ; i < 3; i++)
-    {
-      inputMap.put(keys[i], values[i]);      
+
+    for (int i = 0; i < 3; i++) {
+      inputMap.put(keys[i], values[i]);
     }
     
     CollectorTestSink<KeyValPair<String, Integer>> testsink = new CollectorTestSink<KeyValPair<String, Integer>>();    

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java b/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
index fb1f605..8a5eed2 100644
--- a/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
@@ -18,19 +18,17 @@
  */
 package com.datatorrent.lib.converter;
 
-import java.io.UnsupportedEncodingException;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.junit.Assert;
 import org.junit.Test;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
-import com.datatorrent.lib.util.KeyValPair;
 import com.datatorrent.lib.util.TestUtils;
 
-public class StringValueToNumberConverterForMapTest {
+public class StringValueToNumberConverterForMapTest
+{
 
   @Test
   public void testStringValueToNumericConversion() 
@@ -40,10 +38,9 @@ public class StringValueToNumberConverterForMapTest {
     String[] keys = {"a", "b", "c"};
     
     HashMap<String, String> inputMap = new HashMap<String, String>();
-    
-    for(int i =0 ; i < 3; i++)
-    {
-      inputMap.put(keys[i], values[i]);      
+
+    for (int i = 0; i < 3; i++) {
+      inputMap.put(keys[i], values[i]);
     }
     
     CollectorTestSink<Map<String, Number>> testsink = new CollectorTestSink<Map<String, Number>>();    
@@ -56,15 +53,12 @@ public class StringValueToNumberConverterForMapTest {
     testop.endWindow();
 
     Assert.assertEquals(1,testsink.collectedTuples.size());
-    
-    int cnt = 0;
-    
-    Map<String, Number> output= testsink.collectedTuples.get(0);
-    
-    Assert.assertEquals(output.get("a"), 1.0);      
-    Assert.assertEquals(output.get("b"), 2.0);      
-    Assert.assertEquals(output.get("c"), 3.0);      
-    
-    
+
+    Map<String, Number> output = testsink.collectedTuples.get(0);
+
+    Assert.assertEquals(output.get("a"), 1.0);
+    Assert.assertEquals(output.get("b"), 2.0);
+    Assert.assertEquals(output.get("c"), 3.0);
+
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/counters/BasicCountersTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/counters/BasicCountersTest.java b/library/src/test/java/com/datatorrent/lib/counters/BasicCountersTest.java
index 36416d6..b3c76d1 100644
--- a/library/src/test/java/com/datatorrent/lib/counters/BasicCountersTest.java
+++ b/library/src/test/java/com/datatorrent/lib/counters/BasicCountersTest.java
@@ -21,13 +21,13 @@ package com.datatorrent.lib.counters;
 import java.util.List;
 import java.util.Map;
 
-import com.google.common.collect.Lists;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.commons.lang.mutable.MutableDouble;
 
+import com.google.common.collect.Lists;
+
 import com.datatorrent.common.util.NumberAggregate;
 
 /**
@@ -69,7 +69,8 @@ public class BasicCountersTest
 
     BasicCounters.DoubleAggregator<MutableDouble> aggregator = new BasicCounters.DoubleAggregator<MutableDouble>();
     @SuppressWarnings("unchecked")
-    Map<String, NumberAggregate.DoubleAggregate> aggregateMap = (Map<String, NumberAggregate.DoubleAggregate>) aggregator.aggregate(physicalCounters);
+    Map<String, NumberAggregate.DoubleAggregate> aggregateMap =
+        (Map<String, NumberAggregate.DoubleAggregate>)aggregator.aggregate(physicalCounters);
 
     Assert.assertNotNull("null", aggregateMap.get(CounterKeys.A.name()));
     NumberAggregate.DoubleAggregate aggregate = aggregateMap.get(CounterKeys.A.name());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/db/KeyValueStoreOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/KeyValueStoreOperatorTest.java b/library/src/test/java/com/datatorrent/lib/db/KeyValueStoreOperatorTest.java
index 13ca6d6..90e67d6 100644
--- a/library/src/test/java/com/datatorrent/lib/db/KeyValueStoreOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/KeyValueStoreOperatorTest.java
@@ -24,11 +24,11 @@ import java.util.Map;
 
 import org.junit.Assert;
 
-import com.datatorrent.lib.helper.OperatorContextTestHelper;
-
-import com.datatorrent.api.*;
-
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.LocalMode;
 import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.lib.helper.OperatorContextTestHelper;
 
 /**
  * @param <S>
@@ -55,7 +55,7 @@ public class KeyValueStoreOperatorTest<S extends KeyValueStore>
       public void process(T t)
       {
         @SuppressWarnings("unchecked")
-        Map<String, String> map = (Map<String, String>) t;
+        Map<String, String> map = (Map<String, String>)t;
         resultMap.putAll(map);
         resultCount++;
       }
@@ -69,7 +69,7 @@ public class KeyValueStoreOperatorTest<S extends KeyValueStore>
     @SuppressWarnings("unchecked")
     public Map<String, String> convertToTuple(Map<Object, Object> o)
     {
-      return (Map<String, String>) (Map<?, ?>) o;
+      return (Map<String, String>)(Map<?, ?>)o;
     }
 
   }
@@ -80,7 +80,7 @@ public class KeyValueStoreOperatorTest<S extends KeyValueStore>
     @SuppressWarnings("unchecked")
     public void processTuple(Map<String, String> tuple)
     {
-      store.putAll((Map<Object, Object>) (Map<?, ?>) tuple);
+      store.putAll((Map<Object, Object>)(Map<?, ?>)tuple);
     }
 
   }
@@ -109,8 +109,7 @@ public class KeyValueStoreOperatorTest<S extends KeyValueStore>
       Assert.assertEquals("456", CollectorModule.resultMap.get("test_def"));
       Assert.assertEquals("123", CollectorModule.resultMap.get("test_ghi"));
 
-    }
-    finally {
+    } finally {
       testStore.remove("test_abc");
       testStore.remove("test_def");
       testStore.remove("test_ghi");
@@ -140,8 +139,8 @@ public class KeyValueStoreOperatorTest<S extends KeyValueStore>
       Assert.assertEquals("123", testStore.get("test_abc"));
       Assert.assertEquals("456", testStore.get("test_def"));
       Assert.assertEquals("789", testStore.get("test_ghi"));
-    }
-    finally {
+
+    } finally {
       testStore.remove("test_abc");
       testStore.remove("test_def");
       testStore.remove("test_ghi");

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/db/TransactionableKeyValueStoreOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/TransactionableKeyValueStoreOperatorTest.java b/library/src/test/java/com/datatorrent/lib/db/TransactionableKeyValueStoreOperatorTest.java
index 416c3e5..d37863d 100644
--- a/library/src/test/java/com/datatorrent/lib/db/TransactionableKeyValueStoreOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/TransactionableKeyValueStoreOperatorTest.java
@@ -45,7 +45,7 @@ public class TransactionableKeyValueStoreOperatorTest<S extends TransactionableK
     @SuppressWarnings("unchecked")
     public void processTuple(Map<String, String> tuple)
     {
-      store.putAll((Map<Object, Object>) (Map<?, ?>) tuple);
+      store.putAll((Map<Object, Object>)(Map<?, ?>)tuple);
     }
 
   }
@@ -80,8 +80,7 @@ public class TransactionableKeyValueStoreOperatorTest<S extends TransactionableK
       Assert.assertEquals("123", testStore.get("test_abc"));
       Assert.assertEquals("456", testStore.get("test_def"));
       Assert.assertEquals("789", testStore.get("test_ghi"));
-    }
-    finally {
+    } finally {
       testStore.remove("test_abc");
       testStore.remove("test_def");
       testStore.remove("test_ghi");


[12/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/statistics/WeightedMeanOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/statistics/WeightedMeanOperator.java b/library/src/main/java/com/datatorrent/lib/statistics/WeightedMeanOperator.java
index a312962..7aa86be 100644
--- a/library/src/main/java/com/datatorrent/lib/statistics/WeightedMeanOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/statistics/WeightedMeanOperator.java
@@ -82,7 +82,9 @@ public class WeightedMeanOperator<V extends Number>  extends BaseNumberValueOper
     @Override
     public void process(V tuple)
     {
-      if (tuple.doubleValue() != 0.0) currentWeight = tuple.doubleValue();
+      if (tuple.doubleValue() != 0.0) {
+        currentWeight = tuple.doubleValue();
+      }
     }
   };
 
@@ -101,7 +103,7 @@ public class WeightedMeanOperator<V extends Number>  extends BaseNumberValueOper
   public void endWindow()
   {
     if (weightedCount != 0.0) {
-       mean.emit(getAverage());
+      mean.emit(getAverage());
     }
     weightedSum = 0.0;
     weightedCount = 0.0;
@@ -123,21 +125,21 @@ public class WeightedMeanOperator<V extends Number>  extends BaseNumberValueOper
         val = num.doubleValue() / weightedCount;
         break;
       case INTEGER:
-        val = (int) (num.intValue() / weightedCount);
+        val = (int)(num.intValue() / weightedCount);
         break;
       case FLOAT:
         val = new Float(num.floatValue() / weightedCount);
         break;
       case LONG:
-        val = (long) (num.longValue() / weightedCount);
+        val = (long)(num.longValue() / weightedCount);
         break;
       case SHORT:
-        val = (short) (num.shortValue() / weightedCount);
+        val = (short)(num.shortValue() / weightedCount);
         break;
       default:
         val = num.doubleValue() / weightedCount;
         break;
     }
-    return (V) val;
+    return (V)val;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/AbstractAggregator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/AbstractAggregator.java b/library/src/main/java/com/datatorrent/lib/stream/AbstractAggregator.java
index 9a8daa3..abb6be5 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/AbstractAggregator.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/AbstractAggregator.java
@@ -48,97 +48,97 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 @OperatorAnnotation(partitionable = false)
 public abstract class AbstractAggregator<T> implements Operator
 {
-	/**
-	 * collection of input values.
-	 */
-	protected Collection<T> collection;
-	@Min(0)
-	/**
-	 * size of output collection, all tuples till end window if set to 0.
-	 */
-	private int size = 0;
+  /**
+   * collection of input values.
+   */
+  protected Collection<T> collection;
+  @Min(0)
+  /**
+   * size of output collection, all tuples till end window if set to 0.
+   */
+  private int size = 0;
 
-	/**
-	 * Input port that takes data to be added to a collection.
-	 */
-	public final transient DefaultInputPort<T> input = new DefaultInputPort<T>()
-	{
-		@Override
-		public void process(T tuple)
-		{
-			if (collection == null) {
-				collection = getNewCollection(size);
-			}
-			collection.add(tuple);
-			if (collection.size() == size) {
-				output.emit(collection);
-				collection = null;
-			}
-		}
+  /**
+   * Input port that takes data to be added to a collection.
+   */
+  public final transient DefaultInputPort<T> input = new DefaultInputPort<T>()
+  {
+    @Override
+    public void process(T tuple)
+    {
+      if (collection == null) {
+        collection = getNewCollection(size);
+      }
+      collection.add(tuple);
+      if (collection.size() == size) {
+        output.emit(collection);
+        collection = null;
+      }
+    }
 
-	};
+  };
 
-	/**
-	 * Output port that emits a collection.
-	 */
-	public final transient DefaultOutputPort<Collection<T>> output = new DefaultOutputPort<Collection<T>>();
+  /**
+   * Output port that emits a collection.
+   */
+  public final transient DefaultOutputPort<Collection<T>> output = new DefaultOutputPort<Collection<T>>();
 
-	/**
-	 * Set the size of the collection.
-	 *
-	 * If set to zero, the collection collects all the tuples within a window and
-	 * emits the collection as 1 output tuple at the end of the window. If set to
-	 * positive value, it collects the collection as soon as the size of the
-	 * collection reaches the size.
-	 *
-	 * @param size
-	 *          the size to set
-	 */
-	public void setSize(int size)
-	{
-		this.size = size;
-	}
+  /**
+   * Set the size of the collection.
+   *
+   * If set to zero, the collection collects all the tuples within a window and
+   * emits the collection as 1 output tuple at the end of the window. If set to
+   * positive value, it collects the collection as soon as the size of the
+   * collection reaches the size.
+   *
+   * @param size
+   *          the size to set
+   */
+  public void setSize(int size)
+  {
+    this.size = size;
+  }
 
-	/**
-	 * Size of collection.
-	 *
-	 * @return size of collection
-	 */
-	@Min(0)
-	public int getSize()
-	{
-		return size;
-	}
+  /**
+   * Size of collection.
+   *
+   * @return size of collection
+   */
+  @Min(0)
+  public int getSize()
+  {
+    return size;
+  }
 
-	/**
-	 * Abstract method to get collection of given size.
-	 *
-	 * @param size
-	 * @return collection
-	 */
-	public abstract Collection<T> getNewCollection(int size);
+  /**
+   * Abstract method to get collection of given size.
+   *
+   * @param size
+   * @return collection
+   */
+  public abstract Collection<T> getNewCollection(int size);
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+  }
 
-	@Override
-	public void endWindow()
-	{
-		if (size == 0 && collection != null) {
-			output.emit(collection);
-			collection = null;
-		}
-	}
+  @Override
+  public void endWindow()
+  {
+    if (size == 0 && collection != null) {
+      output.emit(collection);
+      collection = null;
+    }
+  }
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
 
-	@Override
-	public void teardown()
-	{
-	}
+  @Override
+  public void teardown()
+  {
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/ArrayListToItem.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/ArrayListToItem.java b/library/src/main/java/com/datatorrent/lib/stream/ArrayListToItem.java
index 106d16d..efd807a 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/ArrayListToItem.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/ArrayListToItem.java
@@ -18,11 +18,12 @@
  */
 package com.datatorrent.lib.stream;
 
+import java.util.ArrayList;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.util.BaseKeyOperator;
-import java.util.ArrayList;
 
 /**
  * An implementation of BaseKeyOperator that breaks up an ArrayList tuple into Objects.
@@ -42,25 +43,25 @@ import java.util.ArrayList;
 @Stateless
 public class ArrayListToItem<K> extends BaseKeyOperator<K>
 {
-	/**
-	 * Input data port that takes an arraylist.
-	 */
-	public final transient DefaultInputPort<ArrayList<K>> data = new DefaultInputPort<ArrayList<K>>()
-	{
-		/**
-		 * Emit one item at a time
-		 */
-		@Override
-		public void process(ArrayList<K> tuple)
-		{
-			for (K k : tuple) {
-				item.emit(cloneKey(k));
-			}
-		}
-	};
+  /**
+   * Input data port that takes an arraylist.
+   */
+  public final transient DefaultInputPort<ArrayList<K>> data = new DefaultInputPort<ArrayList<K>>()
+  {
+    /**
+     * Emit one item at a time
+     */
+    @Override
+    public void process(ArrayList<K> tuple)
+    {
+      for (K k : tuple) {
+        item.emit(cloneKey(k));
+      }
+    }
+  };
 
-	/**
-	 * Output port that emits an array item.
-	 */
-	public final transient DefaultOutputPort<K> item = new DefaultOutputPort<K>();
+  /**
+   * Output port that emits an array item.
+   */
+  public final transient DefaultOutputPort<K> item = new DefaultOutputPort<K>();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/ConsolidatorKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/ConsolidatorKeyVal.java b/library/src/main/java/com/datatorrent/lib/stream/ConsolidatorKeyVal.java
index 148f0b5..6874796 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/ConsolidatorKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/ConsolidatorKeyVal.java
@@ -50,137 +50,137 @@ import com.datatorrent.lib.util.KeyValPair;
 @OperatorAnnotation(partitionable = false)
 public class ConsolidatorKeyVal<K, V1, V2, V3, V4, V5> implements Operator
 {
-	/**
-	 * key/array values output result.
-	 */
-	protected HashMap<K, ArrayList<Object>> result;
-
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
-
-	@Override
-	public void teardown()
-	{
-	}
-
-	/**
-	 * <p>
-	 * Class operates on <K,V> pair, stores value in given number position in
-	 * list. <br>
-	 *
-	 * @param <V>
-	 *          value type.
-	 */
-	public class ConsolidatorInputPort<V> extends
-			DefaultInputPort<KeyValPair<K, V>>
-	{
-		/**
-		 * Value position in list.
-		 */
-		private int number;
-
-		/**
-		 * Constructor
-		 *
-		 * @param oper
-		 *          Connected operator.
-		 * @param num
-		 *          Value position in list.
-		 */
-		ConsolidatorInputPort(Operator oper, int num)
-		{
-			super();
-			number = num;
-		}
-
-		/**
-		 * Process key/value pair.
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			K key = tuple.getKey();
-			ArrayList<Object> list = getObject(key);
-			list.set(number, tuple.getValue());
-		}
-
-	}
-
-	/**
-	 * V1 type value input port.
-	 */
-	public final transient ConsolidatorInputPort<V1> in1 = new ConsolidatorInputPort<V1>(
-			this, 0);
-
-	/**
-	 * V2 type value input port.
-	 */
-	public final transient ConsolidatorInputPort<V2> in2 = new ConsolidatorInputPort<V2>(
-			this, 1);
-
-	/**
-	 * V3 type value input port.
-	 */
-	@InputPortFieldAnnotation(optional = true)
-	public final transient ConsolidatorInputPort<V3> in3 = new ConsolidatorInputPort<V3>(
-			this, 2);
-
-	/**
-	 * V4 type value input port.
-	 */
-	@InputPortFieldAnnotation(optional = true)
-	public final transient ConsolidatorInputPort<V4> in4 = new ConsolidatorInputPort<V4>(
-			this, 3);
-
-	/**
-	 * V5 type value input port.
-	 */
-	@InputPortFieldAnnotation(optional = true)
-	public final transient ConsolidatorInputPort<V5> in5 = new ConsolidatorInputPort<V5>(
-			this, 4);
-
-	/**
-	 * Output port that emits a hashmap of &lt;key,arraylist&gt;.
-	 */
-	public final transient DefaultOutputPort<HashMap<K, ArrayList<Object>>> out = new DefaultOutputPort<HashMap<K, ArrayList<Object>>>();
-
-	/**
-	 * Get array list object for given key
-	 *
-	 * @param k  key
-	 * @return array list for key.
-	 */
-	public ArrayList<Object> getObject(K k)
-	{
-		ArrayList<Object> val = result.get(k);
-		if (val == null) {
-			val = new ArrayList<Object>(5);
-			val.add(0, null);
-			val.add(1, null);
-			val.add(2, null);
-			val.add(3, null);
-			val.add(4, null);
-			result.put(k, val);
-		}
-		return val;
-	}
-
-	@Override
-	public void beginWindow(long windowId)
-	{
-		result = new HashMap<K, ArrayList<Object>>();
-	}
-
-	/**
-	 * Emits merged data
-	 */
-	@Override
-	public void endWindow()
-	{
-		if (!result.isEmpty()) {
-			out.emit(result);
-		}
-	}
+  /**
+   * key/array values output result.
+   */
+  protected HashMap<K, ArrayList<Object>> result;
+
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
+
+  @Override
+  public void teardown()
+  {
+  }
+
+  /**
+   * <p>
+   * Class operates on <K,V> pair, stores value in given number position in
+   * list. <br>
+   *
+   * @param <V>
+   *          value type.
+   */
+  public class ConsolidatorInputPort<V> extends
+      DefaultInputPort<KeyValPair<K, V>>
+  {
+    /**
+     * Value position in list.
+     */
+    private int number;
+
+    /**
+     * Constructor
+     *
+     * @param oper
+     *          Connected operator.
+     * @param num
+     *          Value position in list.
+     */
+    ConsolidatorInputPort(Operator oper, int num)
+    {
+      super();
+      number = num;
+    }
+
+    /**
+     * Process key/value pair.
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      K key = tuple.getKey();
+      ArrayList<Object> list = getObject(key);
+      list.set(number, tuple.getValue());
+    }
+
+  }
+
+  /**
+   * V1 type value input port.
+   */
+  public final transient ConsolidatorInputPort<V1> in1 = new ConsolidatorInputPort<V1>(
+      this, 0);
+
+  /**
+   * V2 type value input port.
+   */
+  public final transient ConsolidatorInputPort<V2> in2 = new ConsolidatorInputPort<V2>(
+      this, 1);
+
+  /**
+   * V3 type value input port.
+   */
+  @InputPortFieldAnnotation(optional = true)
+  public final transient ConsolidatorInputPort<V3> in3 = new ConsolidatorInputPort<V3>(
+      this, 2);
+
+  /**
+   * V4 type value input port.
+   */
+  @InputPortFieldAnnotation(optional = true)
+  public final transient ConsolidatorInputPort<V4> in4 = new ConsolidatorInputPort<V4>(
+      this, 3);
+
+  /**
+   * V5 type value input port.
+   */
+  @InputPortFieldAnnotation(optional = true)
+  public final transient ConsolidatorInputPort<V5> in5 = new ConsolidatorInputPort<V5>(
+      this, 4);
+
+  /**
+   * Output port that emits a hashmap of &lt;key,arraylist&gt;.
+   */
+  public final transient DefaultOutputPort<HashMap<K, ArrayList<Object>>> out = new DefaultOutputPort<HashMap<K, ArrayList<Object>>>();
+
+  /**
+   * Get array list object for given key
+   *
+   * @param k  key
+   * @return array list for key.
+   */
+  public ArrayList<Object> getObject(K k)
+  {
+    ArrayList<Object> val = result.get(k);
+    if (val == null) {
+      val = new ArrayList<Object>(5);
+      val.add(0, null);
+      val.add(1, null);
+      val.add(2, null);
+      val.add(3, null);
+      val.add(4, null);
+      result.put(k, val);
+    }
+    return val;
+  }
+
+  @Override
+  public void beginWindow(long windowId)
+  {
+    result = new HashMap<K, ArrayList<Object>>();
+  }
+
+  /**
+   * Emits merged data
+   */
+  @Override
+  public void endWindow()
+  {
+    if (!result.isEmpty()) {
+      out.emit(result);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/Counter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/Counter.java b/library/src/main/java/com/datatorrent/lib/stream/Counter.java
index 67aedb4..8de2653 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/Counter.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/Counter.java
@@ -42,59 +42,59 @@ import com.datatorrent.api.Operator.Unifier;
  */
 public class Counter implements Operator, Unifier<Integer>
 {
-        /**
-	 * Input port that takes objects to be counted in each window.
-	 */
-	public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
-	{
-		@Override
-		public void process(Object tuple)
-		{
-			count++;
-		}
+  /**
+   * Input port that takes objects to be counted in each window.
+   */
+  public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
+  {
+    @Override
+    public void process(Object tuple)
+    {
+      count++;
+    }
 
-	};
+  };
 
-          /**
-	 * Output port that takes emits count in each window.
-	 */
-	public final transient DefaultOutputPort<Integer> output = new DefaultOutputPort<Integer>()
-	{
-		@Override
-		public Unifier<Integer> getUnifier()
-		{
-			return Counter.this;
-		}
+  /**
+   * Output port that takes emits count in each window.
+   */
+  public final transient DefaultOutputPort<Integer> output = new DefaultOutputPort<Integer>()
+  {
+    @Override
+    public Unifier<Integer> getUnifier()
+    {
+      return Counter.this;
+    }
 
-	};
+  };
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		count = 0;
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    count = 0;
+  }
 
-	@Override
-	public void process(Integer tuple)
-	{
-		count += tuple;
-	}
+  @Override
+  public void process(Integer tuple)
+  {
+    count += tuple;
+  }
 
-	@Override
-	public void endWindow()
-	{
-		output.emit(count);
-	}
+  @Override
+  public void endWindow()
+  {
+    output.emit(count);
+  }
 
-	@Override
-	public void setup(OperatorContext context)
-	{
-	}
+  @Override
+  public void setup(OperatorContext context)
+  {
+  }
 
-	@Override
-	public void teardown()
-	{
-	}
+  @Override
+  public void teardown()
+  {
+  }
 
-	private transient int count;
+  private transient int count;
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/DevNull.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/DevNull.java b/library/src/main/java/com/datatorrent/lib/stream/DevNull.java
index 8ced16b..877b562 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/DevNull.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/DevNull.java
@@ -18,9 +18,9 @@
  */
 package com.datatorrent.lib.stream;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * An implementation of BaseOperator that terminates a stream and does not affect the tuple.
@@ -39,15 +39,15 @@ import com.datatorrent.api.annotation.Stateless;
 @Stateless
 public class DevNull<K> extends BaseOperator
 {
-	/**
-	 * Input any data type port.
-	 */
-	public final transient DefaultInputPort<K> data = new DefaultInputPort<K>()
-	{
-		@Override
-		public void process(K tuple)
-		{
-			// Does nothing; allows a stream to terminate and therefore be debugged
-		}
-	};
+  /**
+   * Input any data type port.
+   */
+  public final transient DefaultInputPort<K> data = new DefaultInputPort<K>()
+  {
+    @Override
+    public void process(K tuple)
+    {
+      // Does nothing; allows a stream to terminate and therefore be debugged
+    }
+  };
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/DevNullCounter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/DevNullCounter.java b/library/src/main/java/com/datatorrent/lib/stream/DevNullCounter.java
index e8bea13..87093fe 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/DevNullCounter.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/DevNullCounter.java
@@ -18,14 +18,15 @@
  */
 package com.datatorrent.lib.stream;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.Context.OperatorContext;
-
 import javax.validation.constraints.Min;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * An implementation of BaseOperator that is used for logging by counting the tuple and then drops it.
  * <p>
@@ -48,141 +49,140 @@ import org.slf4j.LoggerFactory;
 public class DevNullCounter<K> extends BaseOperator
 {
         /**
-	 * Input port that takes objects to be counted in each window.
-	 */
-	public final transient DefaultInputPort<K> data = new DefaultInputPort<K>()
-	{
-		/**
-		 * Process each tuple. Expects upstream node to compute number of tuples in
-		 * that window and send it as an int<br>
-		 *
-		 * @param tuple
-		 */
-		@Override
-		public void process(K tuple)
-		{
-			tuple_count++;
-		}
-	};
-	private static Logger log = LoggerFactory.getLogger(DevNullCounter.class);
-	private long windowStartTime = 0;
-	long[] tuple_numbers = null;
-	long[] time_numbers = null;
-	int tuple_index = 0;
-	int count_denominator = 1;
-	long count_windowid = 0;
-	long tuple_count = 1; // so that the first begin window starts the count down
+   * Input port that takes objects to be counted in each window.
+   */
+  public final transient DefaultInputPort<K> data = new DefaultInputPort<K>()
+  {
+    /**
+     * Process each tuple. Expects upstream node to compute number of tuples in
+     * that window and send it as an int<br>
+     *
+     * @param tuple
+     */
+    @Override
+    public void process(K tuple)
+    {
+      tuple_count++;
+    }
+  };
+  private static Logger log = LoggerFactory.getLogger(DevNullCounter.class);
+  private long windowStartTime = 0;
+  long[] tuple_numbers = null;
+  long[] time_numbers = null;
+  int tuple_index = 0;
+  int count_denominator = 1;
+  long count_windowid = 0;
+  long tuple_count = 1; // so that the first begin window starts the count down
 
-	private boolean debug = true;
+  private boolean debug = true;
 
-	/**
-	 * getter function for debug state
-	 *
-	 * @return debug state
-	 */
-	public boolean getDebug()
-	{
-		return debug;
-	}
+  /**
+   * getter function for debug state
+   *
+   * @return debug state
+   */
+  public boolean getDebug()
+  {
+    return debug;
+  }
 
-	/**
-	 * setter function for debug state
-	 *
-	 * @param i
-	 *          sets debug to i
-	 */
-	public void setDebug(boolean i)
-	{
-		debug = i;
-	}
+  /**
+   * setter function for debug state
+   *
+   * @param i
+   *          sets debug to i
+   */
+  public void setDebug(boolean i)
+  {
+    debug = i;
+  }
 
-	@Min(1)
-	private int rollingwindowcount = 1;
+  @Min(1)
+  private int rollingwindowcount = 1;
 
-	public void setRollingwindowcount(int val)
-	{
-		rollingwindowcount = val;
-	}
+  public void setRollingwindowcount(int val)
+  {
+    rollingwindowcount = val;
+  }
 
-	/**
-	 * Sets up all the config parameters. Assumes checking is done and has passed
-	 *
-	 * @param context
-	 */
-	@Override
-	public void setup(OperatorContext context)
-	{
-		windowStartTime = 0;
-		if (rollingwindowcount != 1) { // Initialized the tuple_numbers
-			tuple_numbers = new long[rollingwindowcount];
-			time_numbers = new long[rollingwindowcount];
-			for (int i = tuple_numbers.length; i > 0; i--) {
-				tuple_numbers[i - 1] = 0;
-				time_numbers[i - 1] = 0;
-			}
-			tuple_index = 0;
-		}
-	}
+  /**
+   * Sets up all the config parameters. Assumes checking is done and has passed
+   *
+   * @param context
+   */
+  @Override
+  public void setup(OperatorContext context)
+  {
+    windowStartTime = 0;
+    if (rollingwindowcount != 1) { // Initialized the tuple_numbers
+      tuple_numbers = new long[rollingwindowcount];
+      time_numbers = new long[rollingwindowcount];
+      for (int i = tuple_numbers.length; i > 0; i--) {
+        tuple_numbers[i - 1] = 0;
+        time_numbers[i - 1] = 0;
+      }
+      tuple_index = 0;
+    }
+  }
 
-	@Override
-	public void beginWindow(long windowId)
-	{
-		if (tuple_count != 0) { // Do not restart time if no tuples were sent
-			windowStartTime = System.currentTimeMillis();
-			tuple_count = 0;
-		}
-	}
+  @Override
+  public void beginWindow(long windowId)
+  {
+    if (tuple_count != 0) { // Do not restart time if no tuples were sent
+      windowStartTime = System.currentTimeMillis();
+      tuple_count = 0;
+    }
+  }
 
-	/**
-	 * convenient method for not sending more than configured number of windows.
-	 */
-	@Override
-	public void endWindow()
-	{
-		if (!debug) {
-			return;
-		}
-		if (tuple_count == 0) {
-			return;
-		}
-		long elapsedTime = System.currentTimeMillis() - windowStartTime;
-		if (elapsedTime == 0) {
-			elapsedTime = 1; // prevent from / zero
-		}
+  /**
+   * convenient method for not sending more than configured number of windows.
+   */
+  @Override
+  public void endWindow()
+  {
+    if (!debug) {
+      return;
+    }
+    if (tuple_count == 0) {
+      return;
+    }
+    long elapsedTime = System.currentTimeMillis() - windowStartTime;
+    if (elapsedTime == 0) {
+      elapsedTime = 1; // prevent divide by zero
+    }
 
-		long average;
-		long tuples_per_sec = (tuple_count * 1000) / elapsedTime; // * 1000 as
-																															// elapsedTime is
-																															// in millis
-		if (rollingwindowcount == 1) {
-			average = tuples_per_sec;
-		} else { // use tuple_numbers
-			long slots;
-			if (count_denominator == rollingwindowcount) {
-				tuple_numbers[tuple_index] = tuple_count;
-				time_numbers[tuple_index] = elapsedTime;
-				slots = rollingwindowcount;
-				tuple_index++;
-				if (tuple_index == rollingwindowcount) {
-					tuple_index = 0;
-				}
-			} else {
-				tuple_numbers[count_denominator - 1] = tuple_count;
-				time_numbers[count_denominator - 1] = elapsedTime;
-				slots = count_denominator;
-				count_denominator++;
-			}
-			long time_slot = 0;
-			long numtuples = 0;
-			for (int i = 0; i < slots; i++) {
-				numtuples += tuple_numbers[i];
-				time_slot += time_numbers[i];
-			}
-			average = (numtuples * 1000) / time_slot;
-		}
-		log.debug(String
-				.format(
-						"\nWindowid (%d), Time (%d ms): The rate for %d tuples is %d. This window had %d tuples_per_sec ",
-						count_windowid++, elapsedTime, tuple_count, average, tuples_per_sec));
-	}
+    long average;
+    long tuples_per_sec = (tuple_count * 1000) / elapsedTime; // * 1000 as
+                                                              // elapsedTime is
+                                                              // in millis
+    if (rollingwindowcount == 1) {
+      average = tuples_per_sec;
+    } else { // use tuple_numbers
+      long slots;
+      if (count_denominator == rollingwindowcount) {
+        tuple_numbers[tuple_index] = tuple_count;
+        time_numbers[tuple_index] = elapsedTime;
+        slots = rollingwindowcount;
+        tuple_index++;
+        if (tuple_index == rollingwindowcount) {
+          tuple_index = 0;
+        }
+      } else {
+        tuple_numbers[count_denominator - 1] = tuple_count;
+        time_numbers[count_denominator - 1] = elapsedTime;
+        slots = count_denominator;
+        count_denominator++;
+      }
+      long time_slot = 0;
+      long numtuples = 0;
+      for (int i = 0; i < slots; i++) {
+        numtuples += tuple_numbers[i];
+        time_slot += time_numbers[i];
+      }
+      average = (numtuples * 1000) / time_slot;
+    }
+    log.debug(String.format(
+        "\nWindowid (%d), Time (%d ms): The rate for %d tuples is %d. This window had %d tuples_per_sec ",
+        count_windowid++, elapsedTime, tuple_count, average, tuples_per_sec));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/HashMapToKeyValPair.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/HashMapToKeyValPair.java b/library/src/main/java/com/datatorrent/lib/stream/HashMapToKeyValPair.java
index 03dba6d..29ce727 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/HashMapToKeyValPair.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/HashMapToKeyValPair.java
@@ -18,14 +18,15 @@
  */
 package com.datatorrent.lib.stream;
 
+import java.util.HashMap;
+import java.util.Map;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.util.BaseKeyValueOperator;
 import com.datatorrent.lib.util.KeyValPair;
-import java.util.HashMap;
-import java.util.Map;
 
 /**
  * An implementation of BaseKeyValueOperator that breaks a HashMap tuple into objects.
@@ -52,47 +53,47 @@ import java.util.Map;
 @Stateless
 public class HashMapToKeyValPair<K, V> extends BaseKeyValueOperator<K, V>
 {
-	/**
-	 * Input port that takes a hashmap of &lt;key,value&rt;.
-	 */
-	public final transient DefaultInputPort<HashMap<K, V>> data = new DefaultInputPort<HashMap<K, V>>()
-	{
-		/**
-		 * Emits key, key/val pair, and val based on port connections
-		 */
-		@Override
-		public void process(HashMap<K, V> tuple)
-		{
-			for (Map.Entry<K, V> e : tuple.entrySet()) {
-				if (key.isConnected()) {
-					key.emit(cloneKey(e.getKey()));
-				}
-				if (val.isConnected()) {
-					val.emit(cloneValue(e.getValue()));
-				}
-				if (keyval.isConnected()) {
-					keyval.emit(new KeyValPair<K, V>(cloneKey(e.getKey()), cloneValue(e
-							.getValue())));
-				}
-			}
-		}
-	};
+  /**
+   * Input port that takes a hashmap of &lt;key,value&gt;.
+   */
+  public final transient DefaultInputPort<HashMap<K, V>> data = new DefaultInputPort<HashMap<K, V>>()
+  {
+    /**
+     * Emits key, key/val pair, and val based on port connections
+     */
+    @Override
+    public void process(HashMap<K, V> tuple)
+    {
+      for (Map.Entry<K, V> e : tuple.entrySet()) {
+        if (key.isConnected()) {
+          key.emit(cloneKey(e.getKey()));
+        }
+        if (val.isConnected()) {
+          val.emit(cloneValue(e.getValue()));
+        }
+        if (keyval.isConnected()) {
+          keyval.emit(new KeyValPair<K, V>(cloneKey(e.getKey()), cloneValue(e
+              .getValue())));
+        }
+      }
+    }
+  };
 
-	/**
-	 * Key output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<K> key = new DefaultOutputPort<K>();
+  /**
+   * Key output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<K> key = new DefaultOutputPort<K>();
 
-	/**
-	 * key/value pair output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<KeyValPair<K, V>> keyval = new DefaultOutputPort<KeyValPair<K, V>>();
+  /**
+   * key/value pair output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<KeyValPair<K, V>> keyval = new DefaultOutputPort<KeyValPair<K, V>>();
 
-	/**
-	 * Value output port.
-	 */
-	@OutputPortFieldAnnotation(optional = true)
-	public final transient DefaultOutputPort<V> val = new DefaultOutputPort<V>();
+  /**
+   * Value output port.
+   */
+  @OutputPortFieldAnnotation(optional = true)
+  public final transient DefaultOutputPort<V> val = new DefaultOutputPort<V>();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/JsonByteArrayOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/JsonByteArrayOperator.java b/library/src/main/java/com/datatorrent/lib/stream/JsonByteArrayOperator.java
index 73ece79..b33eadc 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/JsonByteArrayOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/JsonByteArrayOperator.java
@@ -22,14 +22,14 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
-import com.datatorrent.api.annotation.Stateless;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.netlet.util.DTThrowable;
 
 /**
@@ -70,8 +70,7 @@ public class JsonByteArrayOperator extends BaseOperator
         JSONObject value = jSONObject.optJSONObject(key);
         if (value == null) {
           map.put(insertKey, jSONObject.get(key));
-        }
-        else {
+        } else {
           getFlatMap(value, map, insertKey);
         }
       }
@@ -105,8 +104,7 @@ public class JsonByteArrayOperator extends BaseOperator
           outputFlatMap.emit(flatMap);
         }
 
-      }
-      catch (Throwable ex) {
+      } catch (Throwable ex) {
         DTThrowable.rethrow(ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/KeyValPairToHashMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/KeyValPairToHashMap.java b/library/src/main/java/com/datatorrent/lib/stream/KeyValPairToHashMap.java
index 29574e6..dfa3ba2 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/KeyValPairToHashMap.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/KeyValPairToHashMap.java
@@ -18,12 +18,13 @@
  */
 package com.datatorrent.lib.stream;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.util.BaseKeyValueOperator;
 import com.datatorrent.lib.util.KeyValPair;
-import java.util.HashMap;
 
 /**
  * An implementation of BaseKeyValueOperator that converts Key Value Pair to a HashMap tuple.
@@ -45,25 +46,25 @@ import java.util.HashMap;
 @Stateless
 public class KeyValPairToHashMap<K, V> extends BaseKeyValueOperator<K, V>
 {
-	/**
-	 * Input port that takes a key value pair.
-	 */
-	public final transient DefaultInputPort<KeyValPair<K, V>> keyval = new DefaultInputPort<KeyValPair<K, V>>()
-	{
-		/**
-		 * Emits key, key/val pair, and val based on port connections
-		 */
-		@Override
-		public void process(KeyValPair<K, V> tuple)
-		{
-			HashMap<K, V> otuple = new HashMap<K, V>(1);
-			otuple.put(tuple.getKey(), tuple.getValue());
-			map.emit(otuple);
-		}
-	};
+  /**
+   * Input port that takes a key value pair.
+   */
+  public final transient DefaultInputPort<KeyValPair<K, V>> keyval = new DefaultInputPort<KeyValPair<K, V>>()
+  {
+    /**
+     * Wraps the key/value pair in a single-entry HashMap and emits it on the map port
+     */
+    @Override
+    public void process(KeyValPair<K, V> tuple)
+    {
+      HashMap<K, V> otuple = new HashMap<K, V>(1);
+      otuple.put(tuple.getKey(), tuple.getValue());
+      map.emit(otuple);
+    }
+  };
 
-	/**
-	 * key/value map output port.
-	 */
-	public final transient DefaultOutputPort<HashMap<K, V>> map = new DefaultOutputPort<HashMap<K, V>>();
+  /**
+   * key/value map output port.
+   */
+  public final transient DefaultOutputPort<HashMap<K, V>> map = new DefaultOutputPort<HashMap<K, V>>();
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/RoundRobinHashMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/RoundRobinHashMap.java b/library/src/main/java/com/datatorrent/lib/stream/RoundRobinHashMap.java
index cc47f5c..aee1213 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/RoundRobinHashMap.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/RoundRobinHashMap.java
@@ -18,10 +18,11 @@
  */
 package com.datatorrent.lib.stream;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.lib.util.BaseKeyValueOperator;
-import java.util.HashMap;
 
 /**
  * <p>
@@ -49,56 +50,56 @@ import java.util.HashMap;
  */
 public class RoundRobinHashMap<K, V> extends BaseKeyValueOperator<K, V>
 {
-	/**
-	 * Keys for round robin association.
-	 */
-	protected K[] keys;
+  /**
+   * Keys for round robin association.
+   */
+  protected K[] keys;
 
-	/**
-	 * Current key index.
-	 */
-	protected int cursor = 0;
+  /**
+   * Current key index.
+   */
+  protected int cursor = 0;
 
-	private HashMap<K, V> otuple;
+  private HashMap<K, V> otuple;
 
-	/**
-	 * Value input port.
-	 */
-	public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
-	{
-		/**
-		 * Emits key, key/val pair, and val based on port connections
-		 */
-		@Override
-		public void process(V tuple)
-		{
-			if (keys.length == 0) {
-				return;
-			}
-			if (cursor == 0) {
-				otuple = new HashMap<K, V>();
-			}
-			otuple.put(keys[cursor], tuple);
-			if (++cursor >= keys.length) {
-				map.emit(otuple);
-				cursor = 0;
-				otuple = null;
-			}
-		}
-	};
+  /**
+   * Value input port.
+   */
+  public final transient DefaultInputPort<V> data = new DefaultInputPort<V>()
+  {
+    /**
+     * Associates the tuple with the next key in round-robin order and emits the map once all keys are filled
+     */
+    @Override
+    public void process(V tuple)
+    {
+      if (keys.length == 0) {
+        return;
+      }
+      if (cursor == 0) {
+        otuple = new HashMap<K, V>();
+      }
+      otuple.put(keys[cursor], tuple);
+      if (++cursor >= keys.length) {
+        map.emit(otuple);
+        cursor = 0;
+        otuple = null;
+      }
+    }
+  };
 
-	/**
-	 * key/value map output port.
-	 */
-	public final transient DefaultOutputPort<HashMap<K, V>> map = new DefaultOutputPort<HashMap<K, V>>();
+  /**
+   * key/value map output port.
+   */
+  public final transient DefaultOutputPort<HashMap<K, V>> map = new DefaultOutputPort<HashMap<K, V>>();
 
-	/**
-	 * Keys for round robin asspociation, set by application.
-	 *
-	 * @param keys
-	 */
-	public void setKeys(K[] keys)
-	{
-		this.keys = keys;
-	}
+  /**
+   * Keys for round robin association, set by application.
+   *
+   * @param keys
+   */
+  public void setKeys(K[] keys)
+  {
+    this.keys = keys;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/StreamDuplicater.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/StreamDuplicater.java b/library/src/main/java/com/datatorrent/lib/stream/StreamDuplicater.java
index f13158e..f08b931 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/StreamDuplicater.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/StreamDuplicater.java
@@ -43,9 +43,9 @@ import com.datatorrent.lib.util.BaseKeyOperator;
 @Stateless
 public class StreamDuplicater<K> extends BaseKeyOperator<K>
 {
-	/**
-	 * Input data port.
-	 */
+  /**
+   * Input data port.
+   */
   public final transient DefaultInputPort<K> data = new DefaultInputPort<K>()
   {
     /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/stream/StreamMerger.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/stream/StreamMerger.java b/library/src/main/java/com/datatorrent/lib/stream/StreamMerger.java
index 5d4cfa6..8678e9f 100644
--- a/library/src/main/java/com/datatorrent/lib/stream/StreamMerger.java
+++ b/library/src/main/java/com/datatorrent/lib/stream/StreamMerger.java
@@ -18,10 +18,10 @@
  */
 package com.datatorrent.lib.stream;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * An implementation of BaseOperator that merges two streams with identical schema and emits the tuples to the output port in order.
@@ -41,10 +41,10 @@ import com.datatorrent.api.annotation.Stateless;
 @Stateless
 public class StreamMerger<K> extends BaseOperator
 {
-	/**
-	 * Data input port 1.
-	 */
- public final transient DefaultInputPort<K> data1 = new DefaultInputPort<K>()
+  /**
+   * Data input port 1.
+   */
+  public final transient DefaultInputPort<K> data1 = new DefaultInputPort<K>()
   {
     /**
      * Emits to port "out"

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/AbstractSqlStreamOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/AbstractSqlStreamOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/AbstractSqlStreamOperator.java
index aefd6cd..e3bba8a 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/AbstractSqlStreamOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/AbstractSqlStreamOperator.java
@@ -18,14 +18,14 @@
  */
 package com.datatorrent.lib.streamquery;
 
-import com.datatorrent.common.util.BaseOperator;
+import java.util.ArrayList;
+import java.util.HashMap;
+
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
-
-import java.util.ArrayList;
-import java.util.HashMap;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * A base implementation of a BaseOperator that is a sql stream operator.&nbsp;  Subclasses should provide the
@@ -58,7 +58,8 @@ public abstract class AbstractSqlStreamOperator extends BaseOperator
      */
     public HashMap<String, ColumnInfo> columnInfoMap = new HashMap<String, ColumnInfo>();
 
-    public InputSchema() {
+    public InputSchema()
+    {
     }
 
     public InputSchema(String name)

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/DeleteOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/DeleteOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/DeleteOperator.java
index 70e4333..77c7522 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/DeleteOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/DeleteOperator.java
@@ -20,9 +20,9 @@ package com.datatorrent.lib.streamquery;
 
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.streamquery.condition.Condition;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/DerbySqlStreamOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/DerbySqlStreamOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/DerbySqlStreamOperator.java
index ac05444..2fe8bc3 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/DerbySqlStreamOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/DerbySqlStreamOperator.java
@@ -18,15 +18,20 @@
  */
 package com.datatorrent.lib.streamquery;
 
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.lib.streamquery.AbstractSqlStreamOperator.InputSchema.ColumnInfo;
-
-import java.sql.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.lib.streamquery.AbstractSqlStreamOperator.InputSchema.ColumnInfo;
+
 /**
  * An implementation of AbstractSqlStreamOperator that provides embedded derby sql input operator.
  * <p>
@@ -38,13 +43,14 @@ import java.util.Map;
 public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
 {
   protected transient ArrayList<PreparedStatement> insertStatements = new ArrayList<PreparedStatement>(5);
-    protected List<String> execStmtStringList = new ArrayList<String>();
+  protected List<String> execStmtStringList = new ArrayList<String>();
   protected transient ArrayList<PreparedStatement> execStatements = new ArrayList<PreparedStatement>(5);
   protected transient ArrayList<PreparedStatement> deleteStatements = new ArrayList<PreparedStatement>(5);
   protected transient Connection db;
 
-  public void addExecStatementString(String stmt) {
-       this.execStmtStringList.add(stmt);
+  public void addExecStatementString(String stmt)
+  {
+    this.execStmtStringList.add(stmt);
   }
 
 
@@ -54,8 +60,7 @@ public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
     System.setProperty("derby.stream.error.file", "/dev/null");
     try {
       Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance();
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       throw new RuntimeException(ex);
     }
 
@@ -74,7 +79,7 @@ public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
         String columnNames = "";
         String insertQuestionMarks = "";
         int j = 0;
-        for (Map.Entry<String, ColumnInfo> entry: inputSchema.columnInfoMap.entrySet()) {
+        for (Map.Entry<String, ColumnInfo> entry : inputSchema.columnInfoMap.entrySet()) {
           if (!columnSpec.isEmpty()) {
             columnSpec += ",";
             columnNames += ",";
@@ -87,21 +92,22 @@ public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
           insertQuestionMarks += "?";
           entry.getValue().bindIndex = ++j;
         }
-        String createTempTableStmt = "DECLARE GLOBAL TEMPORARY TABLE SESSION." + inputSchema.name + "(" + columnSpec + ") NOT LOGGED";
+        String createTempTableStmt =
+            "DECLARE GLOBAL TEMPORARY TABLE SESSION." + inputSchema.name + "(" + columnSpec + ") NOT LOGGED";
         st = db.prepareStatement(createTempTableStmt);
         st.execute();
         st.close();
 
-        String insertStmt = "INSERT INTO SESSION." + inputSchema.name + " (" + columnNames + ") VALUES (" + insertQuestionMarks + ")";
+        String insertStmt = "INSERT INTO SESSION." + inputSchema.name + " (" + columnNames + ") VALUES ("
+            + insertQuestionMarks + ")";
 
         insertStatements.add(i, db.prepareStatement(insertStmt));
         deleteStatements.add(i, db.prepareStatement("DELETE FROM SESSION." + inputSchema.name));
       }
-        for (String stmtStr: execStmtStringList) {
-            execStatements.add(db.prepareStatement(stmtStr));
-        }
-    }
-    catch (SQLException ex) {
+      for (String stmtStr : execStmtStringList) {
+        execStatements.add(db.prepareStatement(stmtStr));
+      }
+    } catch (SQLException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -111,8 +117,7 @@ public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
   {
     try {
       db.setAutoCommit(false);
-    }
-    catch (SQLException ex) {
+    } catch (SQLException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -124,18 +129,16 @@ public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
 
     PreparedStatement insertStatement = insertStatements.get(tableNum);
     try {
-      for (Map.Entry<String, Object> entry: tuple.entrySet()) {
+      for (Map.Entry<String, Object> entry : tuple.entrySet()) {
         ColumnInfo t = inputSchema.columnInfoMap.get(entry.getKey());
         if (t != null && t.bindIndex != 0) {
-          //System.out.println("Binding: "+entry.getValue().toString()+" to "+t.bindIndex);
           insertStatement.setString(t.bindIndex, entry.getValue().toString());
         }
       }
 
       insertStatement.executeUpdate();
       insertStatement.clearParameters();
-    }
-    catch (SQLException ex) {
+    } catch (SQLException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -147,48 +150,46 @@ public class DerbySqlStreamOperator extends AbstractSqlStreamOperator
       db.commit();
       if (bindings != null) {
         for (int i = 0; i < bindings.size(); i++) {
-            for (PreparedStatement stmt: execStatements) {
-                stmt.setString(i, bindings.get(i).toString());
-            }
+          for (PreparedStatement stmt : execStatements) {
+            stmt.setString(i, bindings.get(i).toString());
+          }
         }
       }
 
-
-     for (PreparedStatement stmt: execStatements) {
-          executePreparedStatement(stmt);
+      for (PreparedStatement stmt : execStatements) {
+        executePreparedStatement(stmt);
       }
-      for (PreparedStatement st: deleteStatements) {
+      for (PreparedStatement st : deleteStatements) {
         st.executeUpdate();
         st.clearParameters();
       }
-    }
-    catch (SQLException ex) {
+    } catch (SQLException ex) {
       throw new RuntimeException(ex);
     }
     bindings = null;
   }
 
-    private void executePreparedStatement(PreparedStatement statement) throws SQLException {
-        ResultSet res = statement.executeQuery();
-        ResultSetMetaData resmeta = res.getMetaData();
-        int columnCount = resmeta.getColumnCount();
-        while (res.next()) {
-            HashMap<String, Object> resultRow = new HashMap<String, Object>();
-            for (int i = 1; i <= columnCount; i++) {
-                resultRow.put(resmeta.getColumnName(i), res.getObject(i));
-            }
-            this.result.emit(resultRow);
-        }
-        statement.clearParameters();
+  private void executePreparedStatement(PreparedStatement statement) throws SQLException
+  {
+    ResultSet res = statement.executeQuery();
+    ResultSetMetaData resmeta = res.getMetaData();
+    int columnCount = resmeta.getColumnCount();
+    while (res.next()) {
+      HashMap<String, Object> resultRow = new HashMap<String, Object>();
+      for (int i = 1; i <= columnCount; i++) {
+        resultRow.put(resmeta.getColumnName(i), res.getObject(i));
+      }
+      this.result.emit(resultRow);
     }
+    statement.clearParameters();
+  }
 
   @Override
   public void teardown()
   {
     try {
       db.close();
-    }
-    catch (SQLException ex) {
+    } catch (SQLException ex) {
       throw new RuntimeException(ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/GroupByHavingOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/GroupByHavingOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/GroupByHavingOperator.java
index 63ad18a..1821953 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/GroupByHavingOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/GroupByHavingOperator.java
@@ -24,10 +24,10 @@ import java.util.Map;
 
 import javax.validation.constraints.NotNull;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.streamquery.condition.Condition;
 import com.datatorrent.lib.streamquery.condition.HavingCondition;
 import com.datatorrent.lib.streamquery.function.FunctionIndex;
@@ -100,14 +100,14 @@ public class GroupByHavingOperator extends BaseOperator
   {
     columnGroupIndexes.add(index);
   }
+
   public void addHavingCondition(@NotNull HavingCondition condition)
   {
     havingConditions.add(condition);
   }
 
   /**
-   * @param set
-   *          condition
+   * @param condition condition
    */
   public void setCondition(Condition condition)
   {
@@ -123,8 +123,9 @@ public class GroupByHavingOperator extends BaseOperator
     @Override
     public void process(Map<String, Object> tuple)
     {
-      if ((condition != null) && (!condition.isValidRow(tuple)))
+      if ((condition != null) && (!condition.isValidRow(tuple))) {
         return;
+      }
       rows.add(tuple);
     }
   };
@@ -193,8 +194,9 @@ public class GroupByHavingOperator extends BaseOperator
           return;
         }
       }
-      if (isValidHaving)
+      if (isValidHaving) {
         outport.emit(result);
+      }
     }
 
     rows = new ArrayList<Map<String, Object>>();
@@ -215,13 +217,13 @@ public class GroupByHavingOperator extends BaseOperator
     @Override
     public boolean equals(Object other)
     {
-      if (other instanceof MultiKeyCompare)
-        if (compareKeys.size() != ((MultiKeyCompare) other).compareKeys.size()) {
+      if (other instanceof MultiKeyCompare) {
+        if (compareKeys.size() != ((MultiKeyCompare)other).compareKeys.size()) {
           return false;
         }
+      }
       for (int i = 0; i < compareKeys.size(); i++) {
-        if (!(compareKeys.get(i).equals(((MultiKeyCompare) other).compareKeys
-            .get(i)))) {
+        if (!(compareKeys.get(i).equals(((MultiKeyCompare)other).compareKeys.get(i)))) {
           return false;
         }
       }
@@ -241,8 +243,9 @@ public class GroupByHavingOperator extends BaseOperator
     @Override
     public int compareTo(Object other)
     {
-      if (this.equals(other))
+      if (this.equals(other)) {
         return 0;
+      }
       return -1;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/InnerJoinOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/InnerJoinOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/InnerJoinOperator.java
index f5eafb4..883329e 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/InnerJoinOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/InnerJoinOperator.java
@@ -94,38 +94,37 @@ public class InnerJoinOperator implements Operator
     {
       table1.add(tuple);
       for (int j = 0; j < table2.size(); j++) {
-        if ((joinCondition == null)
-                || (joinCondition.isValidJoin(tuple, table2.get(j)))) {
+        if ((joinCondition == null) || (joinCondition.isValidJoin(tuple, table2.get(j)))) {
           joinRows(tuple, table2.get(j));
         }
       }
-		}
-	};
-
-	/**
-	 * Input port 2 that takes a map of &lt;string,object&gt;.
-	 */
-	public final transient DefaultInputPort<Map<String, Object>> inport2 = new DefaultInputPort<Map<String, Object>>() {
-		@Override
-		public void process(Map<String, Object> tuple)
-		{
-	    table2.add(tuple);
+    }
+  };
+
+  /**
+   * Input port 2 that takes a map of &lt;string,object&gt;.
+   */
+  public final transient DefaultInputPort<Map<String, Object>> inport2 = new DefaultInputPort<Map<String, Object>>()
+  {
+    @Override
+    public void process(Map<String, Object> tuple)
+    {
+      table2.add(tuple);
       for (int j = 0; j < table1.size(); j++) {
-        if ((joinCondition == null)
-                || (joinCondition.isValidJoin(table1.get(j), tuple))) {
+        if ((joinCondition == null) || (joinCondition.isValidJoin(table1.get(j), tuple))) {
           joinRows(table1.get(j), tuple);
         }
       }
-		}
-	};
+    }
+  };
 
-	/**
-	 * Output port that emits a map of &lt;string,object&gt;.
-	 */
-	public final transient DefaultOutputPort<Map<String, Object>> outport =
-			new DefaultOutputPort<Map<String, Object>>();
+  /**
+   * Output port that emits a map of &lt;string,object&gt;.
+   */
+  public final transient DefaultOutputPort<Map<String, Object>> outport =
+      new DefaultOutputPort<Map<String, Object>>();
 
-	@Override
+  @Override
   public void setup(OperatorContext arg0)
   {
     table1 = new ArrayList<Map<String, Object>>();
@@ -159,7 +158,7 @@ public class InnerJoinOperator implements Operator
 
   /**
    * Pick the supported condition. Currently only equal join is supported.
-   * @param set joinCondition
+   * @param joinCondition joinCondition
    */
   public void setJoinCondition(Condition joinCondition)
   {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/OrderByOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/OrderByOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/OrderByOperator.java
index ebc5d23..18d9928 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/OrderByOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/OrderByOperator.java
@@ -21,9 +21,9 @@ package com.datatorrent.lib.streamquery;
 import java.util.ArrayList;
 import java.util.Map;
 
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.Operator;
 import com.datatorrent.api.Operator.Unifier;
 
@@ -49,49 +49,49 @@ import com.datatorrent.api.Operator.Unifier;
  */
 public class OrderByOperator implements Operator, Unifier<Map<String, Object>>
 {
-	/**
-	 * Order by rules.
-	 */
-	ArrayList<OrderByRule<?>>	orderByRules	= new ArrayList<OrderByRule<?>>();
-
-	/**
-	 * Descending flag.
-	 */
-	private boolean isDescending;
-
-	/**
-	 * collected rows.
-	 */
-	private ArrayList<Map<String, Object>> rows;
-
-	/**
-	 * Add order by rule.
-	 */
-	public void addOrderByRule(OrderByRule<?> rule)
-	{
-		orderByRules.add(rule);
-	}
-
-	/**
+  /**
+   * Order by rules.
+   */
+  ArrayList<OrderByRule<?>> orderByRules = new ArrayList<OrderByRule<?>>();
+
+  /**
+   * Descending flag.
+   */
+  private boolean isDescending;
+
+  /**
+   * collected rows.
+   */
+  private ArrayList<Map<String, Object>> rows;
+
+  /**
+   * Add order by rule.
+   */
+  public void addOrderByRule(OrderByRule<?> rule)
+  {
+    orderByRules.add(rule);
+  }
+
+  /**
    * @return isDescending
    */
   public boolean isDescending()
   {
-	  return isDescending;
+    return isDescending;
   }
 
-	/**
-   * @param set isDescending
+  /**
+   * @param isDescending isDescending
    */
   public void setDescending(boolean isDescending)
   {
-	  this.isDescending = isDescending;
+    this.isDescending = isDescending;
   }
 
-	@Override
+  @Override
   public void process(Map<String, Object> tuple)
   {
-	  rows.add(tuple);
+    rows.add(tuple);
   }
 
   @Override
@@ -103,13 +103,17 @@ public class OrderByOperator implements Operator, Unifier<Map<String, Object>>
   @Override
   public void endWindow()
   {
-    for (int i=0; i < orderByRules.size(); i++) {
+    for (int i = 0; i < orderByRules.size(); i++) {
       rows = orderByRules.get(i).sort(rows);
     }
     if (isDescending) {
-      for (int i=0; i < rows.size(); i++)  outport.emit(rows.get(i));
+      for (int i = 0; i < rows.size(); i++) {
+        outport.emit(rows.get(i));
+      }
     } else {
-      for (int i=rows.size()-1; i >= 0;  i--)  outport.emit(rows.get(i));
+      for (int i = rows.size() - 1; i >= 0; i--) {
+        outport.emit(rows.get(i));
+      }
     }
   }
 
@@ -130,7 +134,8 @@ public class OrderByOperator implements Operator, Unifier<Map<String, Object>>
   /**
    * Input port that takes a map of &lt;string,object&gt;.
    */
-  public final transient DefaultInputPort<Map<String, Object>> inport = new DefaultInputPort<Map<String, Object>>() {
+  public final transient DefaultInputPort<Map<String, Object>> inport = new DefaultInputPort<Map<String, Object>>()
+  {
     @Override
     public void process(Map<String, Object> tuple)
     {
@@ -141,18 +146,19 @@ public class OrderByOperator implements Operator, Unifier<Map<String, Object>>
   /**
    * Output port that emits a map of &lt;string,object&gt;.
    */
-  public final transient DefaultOutputPort<Map<String, Object>> outport =  new DefaultOutputPort<Map<String, Object>>()
-      {
-         @Override
-         public Unifier<Map<String, Object>> getUnifier() {
-           OrderByOperator unifier = new OrderByOperator();
-           for (int i=0; i < getOrderByRules().size(); i++) {
-             unifier.addOrderByRule(getOrderByRules().get(i));
-           }
-           unifier.setDescending(isDescending);
-           return unifier;
-         }
-      };
+  public final transient DefaultOutputPort<Map<String, Object>> outport = new DefaultOutputPort<Map<String, Object>>()
+  {
+    @Override
+    public Unifier<Map<String, Object>> getUnifier()
+    {
+      OrderByOperator unifier = new OrderByOperator();
+      for (int i = 0; i < getOrderByRules().size(); i++) {
+        unifier.addOrderByRule(getOrderByRules().get(i));
+      }
+      unifier.setDescending(isDescending);
+      return unifier;
+    }
+  };
 
   /**
    * @return the orderByRules

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/OrderByRule.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/OrderByRule.java b/library/src/main/java/com/datatorrent/lib/streamquery/OrderByRule.java
index 0b16065..8573903 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/OrderByRule.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/OrderByRule.java
@@ -58,7 +58,7 @@ public class OrderByRule<T extends Comparable>
     for (int i = 0; i < rows.size(); i++) {
       Map<String, Object> row = rows.get(i);
       if (row.containsKey(columnName)) {
-        T value = (T) row.get(columnName);
+        T value = (T)row.get(columnName);
         ArrayList<Map<String, Object>> list;
         if (sorted.containsKey(value)) {
           list = sorted.get(value);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/OuterJoinOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/OuterJoinOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/OuterJoinOperator.java
index 1e1dcfb..0494bfb 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/OuterJoinOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/OuterJoinOperator.java
@@ -88,8 +88,7 @@ public class OuterJoinOperator extends InnerJoinOperator
       for (int i = 0; i < table2.size(); i++) {
         boolean merged = false;
         for (int j = 0; j < table1.size(); j++) {
-          if ((joinCondition == null)
-              || (joinCondition.isValidJoin(table1.get(j), table2.get(i)))) {
+          if ((joinCondition == null) || (joinCondition.isValidJoin(table1.get(j), table2.get(i)))) {
             merged = true;
           }
         }
@@ -104,6 +103,7 @@ public class OuterJoinOperator extends InnerJoinOperator
   {
     isLeftJoin = true;
   }
+
   public void setRighttJoin()
   {
     isLeftJoin = false;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/SelectFunctionOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/SelectFunctionOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/SelectFunctionOperator.java
index c1c411c..77616f3 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/SelectFunctionOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/SelectFunctionOperator.java
@@ -95,7 +95,9 @@ public class SelectFunctionOperator implements Operator
   @Override
   public void endWindow()
   {
-    if (functions.size() == 0) return;
+    if (functions.size() == 0) {
+      return;
+    }
     Map<String, Object>  collect = new HashMap<String, Object>();
     for (FunctionIndex function : functions) {
       try {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/SelectOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/SelectOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/SelectOperator.java
index b02e40f..4dbc1f0 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/SelectOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/SelectOperator.java
@@ -22,9 +22,9 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.streamquery.condition.Condition;
 import com.datatorrent.lib.streamquery.index.Index;
 
@@ -89,8 +89,9 @@ public class SelectOperator extends BaseOperator
     @Override
     public void process(Map<String, Object> tuple)
     {
-      if ((condition != null) && (!condition.isValidRow(tuple)))
+      if ((condition != null) && (!condition.isValidRow(tuple))) {
         return;
+      }
       if (indexes.size() == 0) {
         outport.emit(tuple);
         return;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/SelectTopOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/SelectTopOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/SelectTopOperator.java
index 365642f..c3ae083 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/SelectTopOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/SelectTopOperator.java
@@ -58,7 +58,8 @@ public class SelectTopOperator implements Operator
   /**
    * Input port that takes a map of &lt;string,object&gt;.
    */
-  public final transient DefaultInputPort<Map<String, Object>> inport = new DefaultInputPort<Map<String, Object>>() {
+  public final transient DefaultInputPort<Map<String, Object>> inport = new DefaultInputPort<Map<String, Object>>()
+  {
     @Override
     public void process(Map<String, Object> tuple)
     {
@@ -89,13 +90,13 @@ public class SelectTopOperator implements Operator
   @Override
   public void endWindow()
   {
-      int numEmits = topValue;
-      if (isPercentage) {
-        numEmits = list.size() * (topValue/100);
-      }
-      for (int i=0; (i < numEmits)&&(i < list.size()); i++) {
-        outport.emit(list.get(i));
-      }
+    int numEmits = topValue;
+    if (isPercentage) {
+      numEmits = list.size() * (topValue / 100);
+    }
+    for (int i = 0; (i < numEmits) && (i < list.size()); i++) {
+      outport.emit(list.get(i));
+    }
   }
 
   public int getTopValue()

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/UpdateOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/UpdateOperator.java b/library/src/main/java/com/datatorrent/lib/streamquery/UpdateOperator.java
index e130515..6724a7e 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/UpdateOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/UpdateOperator.java
@@ -21,9 +21,9 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 import java.util.Map;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.lib.streamquery.condition.Condition;
 
 /**
@@ -54,33 +54,36 @@ public class UpdateOperator extends BaseOperator
    */
   Map<String, Object> updates = new HashMap<String, Object>();
 
-	/**
-	 *  condition.
-	 */
-	private Condition condition = null;
+  /**
+   *  condition.
+   */
+  private Condition condition = null;
 
-	/**
-	 * set condition.
-	 */
-	public void setCondition(Condition condition)
-	{
-		this.condition = condition;
-	}
+  /**
+   * set condition.
+   */
+  public void setCondition(Condition condition)
+  {
+    this.condition = condition;
+  }
 
   /**
    * Input port that takes a map of &lt;string,object&gt;.
    */
-  public final transient DefaultInputPort<Map<String, Object>> inport = new DefaultInputPort<Map<String, Object>>() {
+  public final transient DefaultInputPort<Map<String, Object>> inport = new DefaultInputPort<Map<String, Object>>()
+  {
     @Override
     public void process(Map<String, Object> tuple)
     {
-      if ((condition != null)&&(!condition.isValidRow(tuple)))return;
+      if ((condition != null) && (!condition.isValidRow(tuple))) {
+        return;
+      }
       if (updates.size() == 0) {
         outport.emit(tuple);
         return;
       }
       Map<String, Object> result = new HashMap<String, Object>();
-      for(Map.Entry<String, Object> entry : tuple.entrySet()) {
+      for (Map.Entry<String, Object> entry : tuple.entrySet()) {
         if (updates.containsKey(entry.getKey())) {
           result.put(entry.getKey(), updates.get(entry.getKey()));
         } else {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/BetweenCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/BetweenCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/BetweenCondition.java
index efcb62c..43cdc72 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/BetweenCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/BetweenCondition.java
@@ -74,11 +74,19 @@ public class BetweenCondition  extends Condition
   @Override
   public boolean isValidRow(@NotNull Map<String, Object> row)
   {
-    if (!row.containsKey(column)) return false;
+    if (!row.containsKey(column)) {
+      return false;
+    }
     Object value = row.get(column);
-    if (value == null) return false;
-    if (((Comparable)value).compareTo((Comparable)leftValue) < 0) return false;
-    if (((Comparable)value).compareTo((Comparable)rightValue) > 0) return false;
+    if (value == null) {
+      return false;
+    }
+    if (((Comparable)value).compareTo((Comparable)leftValue) < 0) {
+      return false;
+    }
+    if (((Comparable)value).compareTo((Comparable)rightValue) > 0) {
+      return false;
+    }
     return true;
   }
 
@@ -88,7 +96,7 @@ public class BetweenCondition  extends Condition
   @Override
   public boolean isValidJoin(@NotNull Map<String, Object> row1, Map<String, Object> row2)
   {
-    assert(false);
+    assert (false);
     return false;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/CompoundCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/CompoundCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/CompoundCondition.java
index 2caadc6..b4bd3ed 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/CompoundCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/CompoundCondition.java
@@ -59,10 +59,12 @@ public class CompoundCondition extends Condition
 
   /**
    * Constructor for logical or metric.
+   *
    * @param leftCondition  Left validate row condition, must be non null. <br>
-   * @param rightCondition  Right validate row condition, must be non null. <br>
+   * @param rightCondition Right validate row condition, must be non null. <br>
    */
-  public CompoundCondition(Condition leftCondition, Condition rightCondition) {
+  public CompoundCondition(Condition leftCondition, Condition rightCondition)
+  {
     this.leftCondition = leftCondition;
     this.rightCondition = rightCondition;
   }
@@ -70,11 +72,13 @@ public class CompoundCondition extends Condition
   /**
    * Constructor for logical and metric if logical and parameter is true.
    * <br>
+   *
    * @param leftCondition  Left validate row condition, must be non null. <br>
-   * @param rightCondition  Right validate row condition, must be non null. <br>
-   * @param isLogicalAnd  Logical AND if true.
+   * @param rightCondition Right validate row condition, must be non null. <br>
+   * @param isLogicalAnd   Logical AND if true.
    */
-  public CompoundCondition(Condition leftCondition, Condition rightCondition, boolean isLogicalAnd) {
+  public CompoundCondition(Condition leftCondition, Condition rightCondition, boolean isLogicalAnd)
+  {
     this.leftCondition = leftCondition;
     this.rightCondition = rightCondition;
     logicalOr = !isLogicalAnd;
@@ -84,7 +88,7 @@ public class CompoundCondition extends Condition
   public boolean isValidRow(Map<String, Object> row)
   {
     if (logicalOr) {
-       return leftCondition.isValidRow(row) || rightCondition.isValidRow(row);
+      return leftCondition.isValidRow(row) || rightCondition.isValidRow(row);
     } else {
       return leftCondition.isValidRow(row) && rightCondition.isValidRow(row);
     }
@@ -117,7 +121,8 @@ public class CompoundCondition extends Condition
     this.rightCondition = rightCondition;
   }
 
-  public void setLogicalAnd() {
+  public void setLogicalAnd()
+  {
     this.logicalOr = false;
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/Condition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/Condition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/Condition.java
index c0a4fde..86d5581 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/Condition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/Condition.java
@@ -31,13 +31,14 @@ import javax.validation.constraints.NotNull;
  * @tags sql condition, filter
  * @since 0.3.3
  */
-abstract public class Condition
+public abstract class Condition
 {
-	/**
-	 * Row containing column/value map.
-	 * @return row validation status.
-	 */
-  abstract public boolean isValidRow(@NotNull Map<String, Object> row);
+  /**
+   * Row containing column/value map.
+   *
+   * @return row validation status.
+   */
+  public abstract boolean isValidRow(@NotNull Map<String, Object> row);
 
   /**
    * Filter valid rows only.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/EqualValueCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/EqualValueCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/EqualValueCondition.java
index fbcb9b0..bb478cf 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/EqualValueCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/EqualValueCondition.java
@@ -57,23 +57,28 @@ public class EqualValueCondition extends Condition
   public boolean isValidRow(Map<String, Object> row)
   {
     // no conditions
-    if (equalMap.size() == 0)
+    if (equalMap.size() == 0) {
       return true;
+    }
 
     // compare each condition value
     for (Map.Entry<String, Object> entry : equalMap.entrySet()) {
-      if (!row.containsKey(entry.getKey()))
+      if (!row.containsKey(entry.getKey())) {
         return false;
+      }
       Object value = row.get(entry.getKey());
       if (entry.getValue() == null) {
-        if (value == null)
+        if (value == null) {
           return true;
+        }
         return false;
       }
-      if (value == null)
+      if (value == null) {
         return false;
-      if (!entry.getValue().equals(value))
+      }
+      if (!entry.getValue().equals(value)) {
         return false;
+      }
     }
     return true;
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCompareValue.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCompareValue.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCompareValue.java
index b0a3127..7877053 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCompareValue.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCompareValue.java
@@ -70,8 +70,8 @@ public class HavingCompareValue<T extends Comparable>   extends HavingCondition
   @Override
   public boolean isValidAggregate(@NotNull ArrayList<Map<String, Object>> rows) throws Exception
   {
-      Object computed = aggregateIndex.compute(rows);
-      return (compareType == compareValue.compareTo(computed));
+    Object computed = aggregateIndex.compute(rows);
+    return (compareType == compareValue.compareTo(computed));
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCondition.java
index 89451e2..6dac690 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/HavingCondition.java
@@ -42,14 +42,15 @@ public abstract class HavingCondition
   protected FunctionIndex  aggregateIndex = null;
 
   /**
-   * @param aggregateIndex  Aggregate index to be validated.
+   * @param aggregateIndex Aggregate index to be validated.
    */
-  public HavingCondition(FunctionIndex  aggregateIndex) {
+  public HavingCondition(FunctionIndex aggregateIndex)
+  {
     this.aggregateIndex = aggregateIndex;
   }
 
   /**
    *  Check if aggregate is valid.
    */
-  abstract public boolean isValidAggregate(@NotNull ArrayList<Map<String, Object>> rows) throws Exception;
+  public abstract boolean isValidAggregate(@NotNull ArrayList<Map<String, Object>> rows) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/InCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/InCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/InCondition.java
index 0d5f5c2..236f3b1 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/InCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/InCondition.java
@@ -50,16 +50,19 @@ public class InCondition extends Condition
   private Set<Object> inValues = new HashSet<Object>();
 
   /**
-   * @param  column Column name for which value is checked in values set.
+   * @param column Column name for which value is checked in values set.
    */
-  public InCondition(@NotNull String column) {
+  public InCondition(@NotNull String column)
+  {
     this.column = column;
   }
 
   @Override
   public boolean isValidRow(@NotNull Map<String, Object> row)
   {
-    if (!row.containsKey(column)) return false;
+    if (!row.containsKey(column)) {
+      return false;
+    }
     return inValues.contains(row.get(column));
   }
 
@@ -79,7 +82,8 @@ public class InCondition extends Condition
     this.column = column;
   }
 
-  public void addInValue(Object value) {
+  public void addInValue(Object value)
+  {
     this.inValues.add(value);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/JoinColumnEqualCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/JoinColumnEqualCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/JoinColumnEqualCondition.java
index f3b829a..d350edc 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/JoinColumnEqualCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/JoinColumnEqualCondition.java
@@ -48,7 +48,8 @@ public class JoinColumnEqualCondition  extends Condition
   @NotNull
   private String column2;
 
-  public JoinColumnEqualCondition(@NotNull String column1,@NotNull String column2) {
+  public JoinColumnEqualCondition(@NotNull String column1, @NotNull String column2)
+  {
     this.column1 = column1;
     this.column2 = column2;
   }
@@ -59,7 +60,7 @@ public class JoinColumnEqualCondition  extends Condition
   @Override
   public boolean isValidRow(Map<String, Object> row)
   {
-    assert(false);
+    assert (false);
     return false;
   }
 
@@ -69,7 +70,9 @@ public class JoinColumnEqualCondition  extends Condition
   @Override
   public boolean isValidJoin(Map<String, Object> row1, Map<String, Object> row2)
   {
-    if (!row1.containsKey(column1) || !row2.containsKey(column2)) return false;
+    if (!row1.containsKey(column1) || !row2.containsKey(column2)) {
+      return false;
+    }
     Object value1 = row1.get(column1);
     Object value2 = row2.get(column2);
     return value1.equals(value2);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/condition/LikeCondition.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/condition/LikeCondition.java b/library/src/main/java/com/datatorrent/lib/streamquery/condition/LikeCondition.java
index f879cd6..b3d7174 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/condition/LikeCondition.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/condition/LikeCondition.java
@@ -54,7 +54,8 @@ public class LikeCondition extends Condition
    * @param column Column to be matched with regular expression, must be non-null.
    * @param pattern Regular expression pattern, must be non-null.
    */
-  public LikeCondition(@NotNull String column,@NotNull String pattern) {
+  public LikeCondition(@NotNull String column,@NotNull String pattern)
+  {
     setColumn(column);
     setPattern(pattern);
   }
@@ -66,10 +67,11 @@ public class LikeCondition extends Condition
   @Override
   public boolean isValidRow(Map<String, Object> row)
   {
-    if (!row.containsKey(column)) return false;
-    Matcher match = pattern.matcher((CharSequence) row.get(column));
-    if (!match.find()) return false;
-    return true;
+    if (!row.containsKey(column)) {
+      return false;
+    }
+    Matcher match = pattern.matcher((CharSequence)row.get(column));
+    return match.find();
   }
 
   /**
@@ -78,7 +80,7 @@ public class LikeCondition extends Condition
   @Override
   public boolean isValidJoin(Map<String, Object> row1, Map<String, Object> row2)
   {
-    assert(false);
+    assert (false);
     return false;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/function/AverageFunction.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/function/AverageFunction.java b/library/src/main/java/com/datatorrent/lib/streamquery/function/AverageFunction.java
index 43223d1..e212ff8 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/function/AverageFunction.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/function/AverageFunction.java
@@ -55,12 +55,14 @@ public class AverageFunction  extends FunctionIndex
   @Override
   public Object compute(@NotNull ArrayList<Map<String, Object>> rows) throws Exception
   {
-    if (rows.size() == 0) return 0.0;
+    if (rows.size() == 0) {
+      return 0.0;
+    }
     double sum = 0.0;
     for (Map<String, Object> row : rows) {
       sum += ((Number)row.get(column)).doubleValue();
     }
-    return sum/rows.size();
+    return sum / rows.size();
   }
 
   /**
@@ -70,7 +72,9 @@ public class AverageFunction  extends FunctionIndex
   @Override
   protected String aggregateName()
   {
-    if (!StringUtils.isEmpty(alias)) return alias;
+    if (!StringUtils.isEmpty(alias)) {
+      return alias;
+    }
     return "AVG(" + column + ")";
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/streamquery/function/CountFunction.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/streamquery/function/CountFunction.java b/library/src/main/java/com/datatorrent/lib/streamquery/function/CountFunction.java
index 350a56a..dafe54e 100644
--- a/library/src/main/java/com/datatorrent/lib/streamquery/function/CountFunction.java
+++ b/library/src/main/java/com/datatorrent/lib/streamquery/function/CountFunction.java
@@ -57,10 +57,14 @@ public class CountFunction extends FunctionIndex
   @Override
   public Object compute(ArrayList<Map<String, Object>> rows) throws Exception
   {
-    if (column.equals("*")) return rows.size();
+    if (column.equals("*")) {
+      return rows.size();
+    }
     long count = 0;
     for (Map<String, Object> row : rows) {
-      if (row.containsKey(column) && (row.get(column) != null)) count++;
+      if (row.containsKey(column) && (row.get(column) != null)) {
+        count++;
+      }
     }
     return count;
   }
@@ -72,7 +76,9 @@ public class CountFunction extends FunctionIndex
   @Override
   protected String aggregateName()
   {
-    if (!StringUtils.isEmpty(alias)) return alias;
+    if (!StringUtils.isEmpty(alias)) {
+      return alias;
+    }
     return "COUNT(" + column + ")";
   }
 


[07/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalBatchOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalBatchOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalBatchOutputOperatorTest.java
index d78df94..63c75ef 100644
--- a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalBatchOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalBatchOutputOperatorTest.java
@@ -18,20 +18,33 @@
  */
 package com.datatorrent.lib.db.jdbc;
 
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Random;
+
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.Operator.ProcessingMode;
-import com.datatorrent.netlet.util.DTThrowable;
-import static com.datatorrent.lib.db.jdbc.JdbcNonTransactionalOutputOperatorTest.*;
 import com.datatorrent.lib.db.jdbc.JdbcNonTransactionalOutputOperatorTest.TestEvent;
+import com.datatorrent.lib.helper.OperatorContextTestHelper;
+import com.datatorrent.netlet.util.DTThrowable;
+
+import static com.datatorrent.lib.db.jdbc.JdbcNonTransactionalOutputOperatorTest.APP_ID;
+import static com.datatorrent.lib.db.jdbc.JdbcNonTransactionalOutputOperatorTest.OPERATOR_ID;
+import static com.datatorrent.lib.db.jdbc.JdbcNonTransactionalOutputOperatorTest.TABLE_NAME;
 import static com.datatorrent.lib.db.jdbc.JdbcOperatorTest.DB_DRIVER;
 import static com.datatorrent.lib.db.jdbc.JdbcOperatorTest.URL;
-import com.datatorrent.lib.helper.OperatorContextTestHelper;
-import java.sql.*;
-import java.util.Random;
-import org.junit.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Test for {@link AbstractJdbcNonTransactionableBatchOutputOperator}
@@ -54,17 +67,7 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
     try {
       Class.forName(DB_DRIVER).newInstance();
       con = DriverManager.getConnection(URL);
-    }
-    catch (SQLException ex) {
-      DTThrowable.rethrow(ex);
-    }
-    catch (ClassNotFoundException ex) {
-      DTThrowable.rethrow(ex);
-    }
-    catch (InstantiationException ex) {
-      DTThrowable.rethrow(ex);
-    }
-    catch (IllegalAccessException ex) {
+    } catch (SQLException | InstantiationException | IllegalAccessException | ClassNotFoundException ex) {
       DTThrowable.rethrow(ex);
     }
   }
@@ -77,8 +80,7 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     try {
       con.close();
-    }
-    catch (SQLException ex) {
+    } catch (SQLException ex) {
       DTThrowable.rethrow(ex);
     }
   }
@@ -108,8 +110,7 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
         int count = resultSet.getInt(1);
         stmt.close();
         return count;
-      }
-      catch (SQLException e) {
+      } catch (SQLException e) {
         throw new RuntimeException("fetching count", e);
       }
     }
@@ -145,55 +146,40 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        1,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should not be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 1, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should not be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(2);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
 
-    Assert.assertEquals("Commit window id ",
-                        2,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should not be written",
-                        2 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 2, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should not be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.teardown();
   }
@@ -207,36 +193,26 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.getStore().disconnect();
 
@@ -249,46 +225,33 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
     OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(OPERATOR_ID, attributeMap);
     outputOperator.setup(context);
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        2* BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        1,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        3 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 1, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 3 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
   }
 
   @Test
@@ -300,36 +263,26 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.getStore().disconnect();
 
@@ -339,49 +292,38 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
     attributeMap.put(OperatorContext.PROCESSING_MODE, ProcessingMode.AT_LEAST_ONCE);
     attributeMap.put(OperatorContext.ACTIVATION_WINDOW_ID, 0L);
     attributeMap.put(DAG.APPLICATION_ID, APP_ID);
-    OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(OPERATOR_ID, attributeMap);
+    OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(
+        OPERATOR_ID, attributeMap);
+
     outputOperator.setup(context);
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        1,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 1, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
   }
 
   @Test
@@ -393,35 +335,25 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.getStore().disconnect();
 
@@ -436,20 +368,15 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     outputOperator.beginWindow(2);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        2,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        3 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 2, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 3 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
   }
 
   @Test
@@ -461,36 +388,26 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.getStore().disconnect();
 
@@ -503,28 +420,20 @@ public class JdbcNonTransactionalBatchOutputOperatorTest
     OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(OPERATOR_ID, attributeMap);
     outputOperator.setup(context);
 
-    Assert.assertEquals("Commit window id ",
-                        0,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 0, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
 
     outputOperator.beginWindow(2);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.input.put(new TestEvent(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    Assert.assertEquals("Commit window id ",
-                        2,
-                        outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
+    Assert.assertEquals("Commit window id ", 2, outputOperator.getStore().getCommittedWindowId(APP_ID, OPERATOR_ID));
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE,
+        outputOperator.getNumOfEventsInStore(outputOperator.getStore().connection));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalOutputOperatorTest.java
index d539aaa..9880aae 100644
--- a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalOutputOperatorTest.java
@@ -32,10 +32,11 @@ import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 import com.datatorrent.api.DAG;
-import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.lib.helper.OperatorContextTestHelper;
-import com.google.common.collect.Lists;
+import com.datatorrent.netlet.util.DTThrowable;
 
 /**
  * Test for {@link AbstractJdbcNonTransactionableOutputOperator Operator}
@@ -70,8 +71,7 @@ public class JdbcNonTransactionalOutputOperatorTest
 
       String createTable = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (ID INTEGER)";
       stmt.executeUpdate(createTable);
-    }
-    catch (Throwable e) {
+    } catch (Throwable e) {
       DTThrowable.rethrow(e);
     }
   }
@@ -84,8 +84,7 @@ public class JdbcNonTransactionalOutputOperatorTest
 
       String cleanTable = "delete from " + TABLE_NAME;
       stmt.executeUpdate(cleanTable);
-    }
-    catch (SQLException e) {
+    } catch (SQLException e) {
       throw new RuntimeException(e);
     }
   }
@@ -122,12 +121,11 @@ public class JdbcNonTransactionalOutputOperatorTest
         String countQuery = "SELECT * FROM " + TABLE_NAME;
         ResultSet resultSet = stmt.executeQuery(countQuery);
         int count = 0;
-        while(resultSet.next()) {
+        while (resultSet.next()) {
           count++;
         }
         return count;
-      }
-      catch (SQLException e) {
+      } catch (SQLException e) {
         throw new RuntimeException("fetching count", e);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStoreTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStoreTest.java b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStoreTest.java
index 56359fb..ef8f9a0 100644
--- a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStoreTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcNonTransactionalStoreTest.java
@@ -32,8 +32,7 @@ public class JdbcNonTransactionalStoreTest
     JdbcNonTransactionalStore jdbcNonTransactionalStore = new JdbcNonTransactionalStore();
     try {
       jdbcNonTransactionalStore.beginTransaction();
-    }
-    catch(RuntimeException e) {
+    } catch (RuntimeException e) {
       return;
     }
     Assert.fail("Exception should be thrown");
@@ -45,8 +44,7 @@ public class JdbcNonTransactionalStoreTest
     JdbcNonTransactionalStore jdbcNonTransactionalStore = new JdbcNonTransactionalStore();
     try {
       jdbcNonTransactionalStore.commitTransaction();
-    }
-    catch(RuntimeException e) {
+    } catch (RuntimeException e) {
       return;
     }
     Assert.fail("Exception should be thrown");
@@ -58,8 +56,7 @@ public class JdbcNonTransactionalStoreTest
     JdbcNonTransactionalStore jdbcNonTransactionalStore = new JdbcNonTransactionalStore();
     try {
       jdbcNonTransactionalStore.rollbackTransaction();
-    }
-    catch(RuntimeException e) {
+    } catch (RuntimeException e) {
       return;
     }
     Assert.fail("Exception should be thrown");
@@ -71,8 +68,7 @@ public class JdbcNonTransactionalStoreTest
     JdbcNonTransactionalStore jdbcNonTransactionalStore = new JdbcNonTransactionalStore();
     try {
       jdbcNonTransactionalStore.isInTransaction();
-    }
-    catch(RuntimeException e) {
+    } catch (RuntimeException e) {
       return;
     }
     Assert.fail("Exception should be thrown");

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/formatter/JsonFormatterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/formatter/JsonFormatterTest.java b/library/src/test/java/com/datatorrent/lib/formatter/JsonFormatterTest.java
index d9daf97..98be88c 100644
--- a/library/src/test/java/com/datatorrent/lib/formatter/JsonFormatterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/formatter/JsonFormatterTest.java
@@ -25,18 +25,22 @@ import java.io.PrintStream;
 import java.util.Date;
 import java.util.List;
 
-import org.apache.commons.io.FileUtils;
 import org.joda.time.DateTime;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.runner.Description;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.io.FileUtils;
+
+import com.google.common.collect.Lists;
 
 import com.datatorrent.lib.io.fs.AbstractFileOutputOperatorTest.FSTestWatcher;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.TestUtils;
 import com.datatorrent.lib.util.TestUtils.TestInfo;
-import com.google.common.collect.Lists;
 
 public class JsonFormatterTest
 {
@@ -150,7 +154,7 @@ public class JsonFormatterTest
     Assert.assertEquals(1, validDataSink.collectedTuples.size());
     Assert.assertEquals(0, invalidDataSink.collectedTuples.size());
     String expectedJSONString = "{\"a\":0,\"b\":0,\"c\":null,\"d\":null,\"date\":null}";
-    System.out.println(validDataSink.collectedTuples.get(0));
+    LOG.debug("{}", validDataSink.collectedTuples.get(0));
     Assert.assertEquals(expectedJSONString, validDataSink.collectedTuples.get(0));
   }
 
@@ -199,4 +203,6 @@ public class JsonFormatterTest
   {
   }
 
+  private static final Logger LOG = LoggerFactory.getLogger(JsonFormatterTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java b/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
index 50ed3bd..bb51ca4 100644
--- a/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
@@ -33,12 +33,13 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestWatcher;
 import org.junit.runner.Description;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.esotericsoftware.kryo.Kryo;
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
 
-import com.datatorrent.lib.parser.XmlParser;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.TestUtils;
 
@@ -76,6 +77,7 @@ public class XmlFormatterTest
     }
 
   }
+
   @Test
   public void testOperatorSerialization()
   {
@@ -162,7 +164,7 @@ public class XmlFormatterTest
 
     operator.setup(null);
     operator.in.process(e);
-    System.out.println(validDataSink.collectedTuples.get(0));
+    LOG.debug("{}", validDataSink.collectedTuples.get(0));
     Assert.assertEquals(1, validDataSink.collectedTuples.size());
     Assert.assertEquals(0, invalidDataSink.collectedTuples.size());
     String expected = "<EmployeeBean>" + "<name>john</name>"
@@ -202,7 +204,7 @@ public class XmlFormatterTest
 
   }
 
-  @XmlType (propOrder={"name","dept","eid", "dateOfJoining", "address"})
+  @XmlType(propOrder = {"name", "dept", "eid", "dateOfJoining", "address"})
   public static class EmployeeBean
   {
 
@@ -292,4 +294,6 @@ public class XmlFormatterTest
 
   }
 
+  private static final Logger LOG = LoggerFactory.getLogger(XmlFormatterTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/helper/OperatorContextTestHelper.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/helper/OperatorContextTestHelper.java b/library/src/test/java/com/datatorrent/lib/helper/OperatorContextTestHelper.java
index d8138d5..2ece6b2 100644
--- a/library/src/test/java/com/datatorrent/lib/helper/OperatorContextTestHelper.java
+++ b/library/src/test/java/com/datatorrent/lib/helper/OperatorContextTestHelper.java
@@ -34,7 +34,7 @@ import com.datatorrent.api.Context.OperatorContext;
  */
 public class OperatorContextTestHelper
 {
-  private final static ThreadLocal<DateFormat> DATE_FORMAT_THREAD_LOCAL = new ThreadLocal<DateFormat>()
+  private static final ThreadLocal<DateFormat> DATE_FORMAT_THREAD_LOCAL = new ThreadLocal<DateFormat>()
   {
     @Override
     protected DateFormat initialValue()

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/helper/SamplePubSubWebSocketServlet.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/helper/SamplePubSubWebSocketServlet.java b/library/src/test/java/com/datatorrent/lib/helper/SamplePubSubWebSocketServlet.java
index 2cc0e1d..9d501aa 100644
--- a/library/src/test/java/com/datatorrent/lib/helper/SamplePubSubWebSocketServlet.java
+++ b/library/src/test/java/com/datatorrent/lib/helper/SamplePubSubWebSocketServlet.java
@@ -65,11 +65,9 @@ public class SamplePubSubWebSocketServlet extends WebSocketServlet
           if (topic != null) {
             subscriber = this;
           }
-        }
-        else if (type.equals("unsubscribe")) {
+        } else if (type.equals("unsubscribe")) {
           subscriber = null;
-        }
-        else if (type.equals("publish")) {
+        } else if (type.equals("publish")) {
           Object data = map.get("data");
           if (data != null) {
             if (subscriber != null) {
@@ -77,8 +75,7 @@ public class SamplePubSubWebSocketServlet extends WebSocketServlet
             }
           }
         }
-      }
-      catch (Exception ex) {
+      } catch (Exception ex) {
         LOG.warn("Data read error", ex);
       }
     }
@@ -109,8 +106,7 @@ public class SamplePubSubWebSocketServlet extends WebSocketServlet
     map.put("data", data);
     try {
       webSocket.connection.sendMessage(mapper.writeValueAsString(map));
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       LOG.warn("Connection send error", ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/ApacheRandomLogsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/ApacheRandomLogsTest.java b/library/src/test/java/com/datatorrent/lib/io/ApacheRandomLogsTest.java
index e619ff8..a2c021e 100644
--- a/library/src/test/java/com/datatorrent/lib/io/ApacheRandomLogsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/ApacheRandomLogsTest.java
@@ -21,7 +21,6 @@ package com.datatorrent.lib.io;
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.datatorrent.lib.io.ApacheGenRandomLogs;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
 /**
@@ -29,37 +28,39 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class ApacheRandomLogsTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
+  @SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
   @Test
-	public void test()
-	{
-		ApacheGenRandomLogs oper = new ApacheGenRandomLogs();
-		CollectorTestSink sink = new CollectorTestSink();
-		oper.outport.setSink(sink);
-		oper.setup(null);
+  public void test()
+  {
+    ApacheGenRandomLogs oper = new ApacheGenRandomLogs();
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+    oper.setup(null);
 
-		Thread t = new EmitTuples(oper);
-		t.start();
-		try
-		{
-			Thread.sleep(1000);
-		} catch (InterruptedException e)
-		{
-		}
-		t.stop();
-		Assert.assertTrue("Tuples emitted", sink.collectedTuples.size() > 0);
-	}
+    Thread t = new EmitTuples(oper);
+    t.start();
+    try {
+      Thread.sleep(1000);
+    } catch (InterruptedException e) {
+      //Fixme
+    }
+    t.stop();
+    Assert.assertTrue("Tuples emitted", sink.collectedTuples.size() > 0);
+  }
 
-	private class EmitTuples extends Thread {
-		private ApacheGenRandomLogs oper;
-		public EmitTuples(ApacheGenRandomLogs oper)
-		{
-			this.oper = oper;
-		}
-		@Override
-		public void run()
-		{
-			oper.emitTuples();
-		}
-	}
+  private class EmitTuples extends Thread
+  {
+    private ApacheGenRandomLogs oper;
+
+    public EmitTuples(ApacheGenRandomLogs oper)
+    {
+      this.oper = oper;
+    }
+
+    @Override
+    public void run()
+    {
+      oper.emitTuples();
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/HttpJsonChunksInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/HttpJsonChunksInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/HttpJsonChunksInputOperatorTest.java
index ada1148..959e25e 100644
--- a/library/src/test/java/com/datatorrent/lib/io/HttpJsonChunksInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/HttpJsonChunksInputOperatorTest.java
@@ -29,7 +29,6 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.io.IOUtils;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.eclipse.jetty.server.Handler;
@@ -39,6 +38,8 @@ import org.eclipse.jetty.server.handler.AbstractHandler;
 import org.junit.Assert;
 import org.junit.Test;
 
+import org.apache.commons.io.IOUtils;
+
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
 /**
@@ -74,8 +75,7 @@ public class HttpJsonChunksInputOperatorTest
           response.getOutputStream().println();
           response.getOutputStream().println(0);
           response.getOutputStream().flush();
-        }
-        catch (JSONException e) {
+        } catch (JSONException e) {
           response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error generating response: " + e.toString());
         }
 
@@ -88,8 +88,6 @@ public class HttpJsonChunksInputOperatorTest
     server.start();
 
     String url = "http://localhost:" + server.getConnectors()[0].getLocalPort() + "/somecontext";
-    System.out.println(url);
-
     final AbstractHttpInputOperator operator = new HttpJsonChunksInputOperator();
 
     CollectorTestSink sink = new CollectorTestSink();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/HttpLinesInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/HttpLinesInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/HttpLinesInputOperatorTest.java
index 10ec6c2..be405ab 100644
--- a/library/src/test/java/com/datatorrent/lib/io/HttpLinesInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/HttpLinesInputOperatorTest.java
@@ -28,7 +28,6 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.io.IOUtils;
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.Request;
 import org.eclipse.jetty.server.Server;
@@ -36,6 +35,8 @@ import org.eclipse.jetty.server.handler.AbstractHandler;
 import org.junit.Assert;
 import org.junit.Test;
 
+import org.apache.commons.io.IOUtils;
+
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.TestUtils;
 
@@ -79,7 +80,6 @@ public class HttpLinesInputOperatorTest
     server.start();
 
     String url = "http://localhost:" + server.getConnectors()[0].getLocalPort() + "/somecontext";
-    System.out.println(url);
 
     final HttpLinesInputOperator operator = new HttpLinesInputOperator();
     CollectorTestSink<String> sink = TestUtils.setSink(operator.outputPort, new CollectorTestSink<String>());

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/HttpMultiValuedMapGetOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/HttpMultiValuedMapGetOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/HttpMultiValuedMapGetOperatorTest.java
index b321cfa..927317e 100644
--- a/library/src/test/java/com/datatorrent/lib/io/HttpMultiValuedMapGetOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/HttpMultiValuedMapGetOperatorTest.java
@@ -28,9 +28,6 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.core.MultivaluedMap;
 
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.Request;
 import org.eclipse.jetty.server.Server;
@@ -38,6 +35,9 @@ import org.eclipse.jetty.server.handler.AbstractHandler;
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.TestUtils;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/HttpPostOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/HttpPostOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/HttpPostOutputOperatorTest.java
index eb69bdf..afe518b 100644
--- a/library/src/test/java/com/datatorrent/lib/io/HttpPostOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/HttpPostOutputOperatorTest.java
@@ -31,7 +31,6 @@ import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.core.MediaType;
 
-import org.apache.commons.io.IOUtils;
 import org.codehaus.jettison.json.JSONObject;
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.Request;
@@ -40,6 +39,8 @@ import org.eclipse.jetty.server.handler.AbstractHandler;
 import org.junit.Assert;
 import org.junit.Test;
 
+import org.apache.commons.io.IOUtils;
+
 /**
  * Functional test for {@link com.datatorrent.lib.io.HttpPostOutputOperator}.
  */
@@ -75,8 +76,6 @@ public class HttpPostOutputOperatorTest
     server.start();
 
     String url = "http://localhost:" + server.getConnectors()[0].getLocalPort() + "/somecontext";
-    System.out.println("url: " + url);
-
 
     HttpPostOutputOperator<Object> node = new HttpPostOutputOperator<Object>();
     node.setUrl(url);
@@ -95,7 +94,6 @@ public class HttpPostOutputOperatorTest
     }
 
     Assert.assertEquals("number requests", 1, receivedMessages.size());
-    System.out.println(receivedMessages.get(0));
     JSONObject json = new JSONObject(data);
     Assert.assertTrue("request body " + receivedMessages.get(0), receivedMessages.get(0).contains(json.toString()));
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/IdempotentStorageManagerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/IdempotentStorageManagerTest.java b/library/src/test/java/com/datatorrent/lib/io/IdempotentStorageManagerTest.java
index 4b29830..acb3fc4 100644
--- a/library/src/test/java/com/datatorrent/lib/io/IdempotentStorageManagerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/IdempotentStorageManagerTest.java
@@ -79,8 +79,7 @@ public class IdempotentStorageManagerTest
       storageManager.teardown();
       try {
         FileUtils.deleteDirectory(new File("target/" + description.getClassName()));
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException(e);
       }
     }
@@ -105,7 +104,7 @@ public class IdempotentStorageManagerTest
     testMeta.storageManager.save(data, 1, 1);
     testMeta.storageManager.setup(testMeta.context);
     @SuppressWarnings("unchecked")
-    Map<Integer, String> decoded = (Map<Integer, String>) testMeta.storageManager.load(1, 1);
+    Map<Integer, String> decoded = (Map<Integer, String>)testMeta.storageManager.load(1, 1);
     Assert.assertEquals("dataOf1", data, decoded);
   }
 
@@ -130,8 +129,7 @@ public class IdempotentStorageManagerTest
     for (Integer operatorId : decodedStates.keySet()) {
       if (operatorId == 1) {
         Assert.assertEquals("data of 1", dataOf1, decodedStates.get(1));
-      }
-      else {
+      } else {
         Assert.assertEquals("data of 2", dataOf2, decodedStates.get(2));
       }
     }
@@ -182,8 +180,7 @@ public class IdempotentStorageManagerTest
     testMeta.storageManager.save(dataOf2, 2, 1);
     testMeta.storageManager.save(dataOf3, 3, 1);
 
-    testMeta.storageManager.partitioned(Lists.<IdempotentStorageManager>newArrayList(testMeta.storageManager),
-      Sets.newHashSet(2, 3));
+    testMeta.storageManager.partitioned(Lists.<IdempotentStorageManager>newArrayList(testMeta.storageManager), Sets.newHashSet(2, 3));
     testMeta.storageManager.setup(testMeta.context);
     testMeta.storageManager.deleteUpTo(1, 6);
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataOperatorTest.java
index 43f9186..7801619 100644
--- a/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataOperatorTest.java
@@ -34,8 +34,7 @@ public abstract class PubSubWebSocketAppDataOperatorTest
   public static final URI GATEWAY_CONNECT_ADDRESS;
   public static final URI URI_ADDRESS;
 
-  static
-  {
+  static {
     try {
       GATEWAY_CONNECT_ADDRESS = new URI("ws://" + GATEWAY_CONNECT_ADDRESS_STRING + "/pubsub");
       URI_ADDRESS = new URI(URI_ADDRESS_STRING);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQueryTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQueryTest.java b/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQueryTest.java
index 3dc5be3..fc92429 100644
--- a/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQueryTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketAppDataQueryTest.java
@@ -22,13 +22,11 @@ import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import com.datatorrent.lib.helper.OperatorContextTestHelper;
-
 import com.datatorrent.api.Attribute;
 import com.datatorrent.api.Context;
 import com.datatorrent.api.Context.OperatorContext;
-
 import com.datatorrent.common.experimental.AppData.ConnectionInfoProvider;
+import com.datatorrent.lib.helper.OperatorContextTestHelper;
 
 public class PubSubWebSocketAppDataQueryTest extends PubSubWebSocketAppDataOperatorTest
 {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketOperatorTest.java
index 402bb34..e165649 100644
--- a/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/PubSubWebSocketOperatorTest.java
@@ -50,7 +50,7 @@ public class PubSubWebSocketOperatorTest
     contextHandler.addServlet(sh, "/pubsub");
     contextHandler.addServlet(sh, "/*");
     server.start();
-    Connector connector[] = server.getConnectors();
+    Connector[] connector = server.getConnectors();
     URI uri = URI.create("ws://localhost:" + connector[0].getLocalPort() + "/pubsub");
 
     PubSubWebSocketOutputOperator<Object> outputOperator = new PubSubWebSocketOutputOperator<Object>();
@@ -100,10 +100,10 @@ public class PubSubWebSocketOperatorTest
     Assert.assertTrue("tuples emitted", sink.collectedTuples.size() > 1);
 
     @SuppressWarnings("unchecked")
-    Map<String, String> tuple = (Map<String, String>) sink.collectedTuples.get(0);
+    Map<String, String> tuple = (Map<String, String>)sink.collectedTuples.get(0);
     Assert.assertEquals("Expects {\"hello\":\"world\"} as data", "world", tuple.get("hello"));
 
-    String stringResult = (String) sink.collectedTuples.get(1);
+    String stringResult = (String)sink.collectedTuples.get(1);
     Assert.assertEquals("Expects {\"hello\":\"world\"} as data", stringData, stringResult);
 
     inputOperator.deactivate();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/SmtpOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/SmtpOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/SmtpOutputOperatorTest.java
index b4a649a..6bd839d 100644
--- a/library/src/test/java/com/datatorrent/lib/io/SmtpOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/SmtpOutputOperatorTest.java
@@ -26,20 +26,22 @@ import javax.mail.Message;
 import javax.mail.internet.InternetAddress;
 import javax.mail.internet.MimeMessage;
 
-import org.apache.hadoop.conf.Configuration;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.datatorrent.api.DAG;
-import com.datatorrent.api.LocalMode;
-import com.datatorrent.api.StreamingApplication;
+import org.apache.hadoop.conf.Configuration;
+
 import com.google.common.collect.Maps;
 import com.icegreen.greenmail.util.GreenMail;
 import com.icegreen.greenmail.util.ServerSetup;
 import com.icegreen.greenmail.util.ServerSetupTest;
 
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.LocalMode;
+import com.datatorrent.api.StreamingApplication;
+
 public class SmtpOutputOperatorTest
 {
 
@@ -97,7 +99,7 @@ public class SmtpOutputOperatorTest
     String expectedContent = content.replace("{}", data.toString()).trim();
 
     Assert.assertTrue(expectedContent.equals(receivedContent));
-    Assert.assertEquals(from, ((InternetAddress) messages[0].getFrom()[0]).getAddress());
+    Assert.assertEquals(from, ((InternetAddress)messages[0].getFrom()[0]).getAddress());
     Assert.assertEquals(to, messages[0].getRecipients(Message.RecipientType.TO)[0].toString());
     Assert.assertEquals(cc, messages[0].getRecipients(Message.RecipientType.TO)[1].toString());
     Assert.assertEquals(cc, messages[0].getRecipients(Message.RecipientType.CC)[0].toString());
@@ -121,7 +123,7 @@ public class SmtpOutputOperatorTest
     String expectedContent = content.replace("{}", data.toString()).trim();
 
     Assert.assertTrue(expectedContent.equals(receivedContent));
-    Assert.assertEquals(from, ((InternetAddress) messages[0].getFrom()[0]).getAddress());
+    Assert.assertEquals(from, ((InternetAddress)messages[0].getFrom()[0]).getAddress());
     Assert.assertEquals(to, messages[0].getAllRecipients()[0].toString());
   }
 
@@ -139,7 +141,8 @@ public class SmtpOutputOperatorTest
     conf.set(StreamingApplication.DT_PREFIX + "operator.o1.prop.recipients.CC", cc);
 
     final AtomicReference<SmtpOutputOperator> o1 = new AtomicReference<SmtpOutputOperator>();
-    StreamingApplication app = new StreamingApplication() {
+    StreamingApplication app = new StreamingApplication()
+    {
       @Override
       public void populateDAG(DAG dag, Configuration conf)
       {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/SocketInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/SocketInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/SocketInputOperatorTest.java
index 79d780d..a79ace7 100644
--- a/library/src/test/java/com/datatorrent/lib/io/SocketInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/SocketInputOperatorTest.java
@@ -87,9 +87,8 @@ public class SocketInputOperatorTest
           reader.close();
           clientChannel.close();
         }
-      }
-      catch (Exception e) {
-        // LOG.debug("server ", e);
+      } catch (Exception e) {
+        //fixme
       }
     }
   }
@@ -118,16 +117,15 @@ public class SocketInputOperatorTest
       operator.endWindow();
       operator.deactivate();
       operator.teardown();
-      String outputString = (String) sink.collectedTuples.get(0);
+      String outputString = (String)sink.collectedTuples.get(0);
       Assert.assertEquals(strBuffer.substring(0, outputString.length()), sink.collectedTuples.get(0));
       int length = outputString.length();
-      outputString = (String) sink.collectedTuples.get(1);
+      outputString = (String)sink.collectedTuples.get(1);
       Assert.assertEquals(strBuffer.substring(length, length + outputString.length()), sink.collectedTuples.get(1));
       server.interrupt();
       server.join();
       Thread.sleep(1000);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       LOG.debug("exception", e);
     }
   }
@@ -161,15 +159,14 @@ public class SocketInputOperatorTest
       int endIndex = 0;
       int start = 0;
       for (int i = 0; i < 10; i++) {
-        endIndex += ((String) sink.collectedTuples.get(i)).length();
+        endIndex += ((String)sink.collectedTuples.get(i)).length();
         Assert.assertEquals(strBuffer.substring(start, endIndex), sink.collectedTuples.get(i));
         start = endIndex;
       }
       server.interrupt();
       server.join();
       Thread.sleep(1000);
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       LOG.debug("exception", e);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/WebSocketServerInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/WebSocketServerInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/WebSocketServerInputOperatorTest.java
index 184a6bd..5ce5276 100644
--- a/library/src/test/java/com/datatorrent/lib/io/WebSocketServerInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/WebSocketServerInputOperatorTest.java
@@ -19,19 +19,18 @@
 package com.datatorrent.lib.io;
 
 import java.net.URI;
-
 import java.util.List;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
-import com.google.common.collect.Lists;
-
 import org.eclipse.jetty.websocket.WebSocket;
 import org.eclipse.jetty.websocket.WebSocketClient;
 import org.eclipse.jetty.websocket.WebSocketClientFactory;
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.collect.Lists;
+
 public class WebSocketServerInputOperatorTest
 {
   @Test
@@ -57,11 +56,10 @@ public class WebSocketServerInputOperatorTest
 
     long startTime = System.currentTimeMillis();
 
-    while(startTime + 10000 > System.currentTimeMillis()) {
-      if(TestWSSIO.messages.size() >= 1) {
+    while (startTime + 10000 > System.currentTimeMillis()) {
+      if (TestWSSIO.messages.size() >= 1) {
         break;
       }
-
       Thread.sleep(100);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorFailureHandlingTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorFailureHandlingTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorFailureHandlingTest.java
index b50fe29..d11125b 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorFailureHandlingTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorFailureHandlingTest.java
@@ -18,23 +18,35 @@
  */
 package com.datatorrent.lib.io.fs;
 
-import com.datatorrent.api.*;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.HashSet;
 
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+
+import com.google.common.collect.Sets;
+
+import com.datatorrent.api.Attribute;
+import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.lib.helper.OperatorContextTestHelper;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.util.TestUtils;
 import com.datatorrent.lib.util.TestUtils.TestInfo;
-import com.google.common.collect.*;
-import java.io.*;
-import java.util.*;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.fs.*;
-import org.junit.*;
 
 public class AbstractFileInputOperatorFailureHandlingTest
 {
-  @Rule public TestInfo testMeta = new TestInfo();
+  @Rule
+  public TestInfo testMeta = new TestInfo();
 
   public static class TestFileInputOperator extends AbstractFileInputOperator<String>
   {
@@ -60,7 +72,8 @@ public class AbstractFileInputOperatorFailureHandlingTest
       br = null;
     }
 
-    @Override protected InputStream retryFailedFile(FailedFile ff) throws IOException
+    @Override
+    protected InputStream retryFailedFile(FailedFile ff) throws IOException
     {
       count = 0;
       return super.retryFailedFile(ff);
@@ -90,13 +103,13 @@ public class AbstractFileInputOperatorFailureHandlingTest
     FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.getDir()).getAbsolutePath()), true);
     HashSet<String> allLines = Sets.newHashSet();
     // Create files with 100 records.
-    for (int file=0; file<10; file++) {
+    for (int file = 0; file < 10; file++) {
       HashSet<String> lines = Sets.newHashSet();
-      for (int line=0; line<10; line++) {
-        lines.add("f"+file+"l"+line);
+      for (int line = 0; line < 10; line++) {
+        lines.add("f" + file + "l" + line);
       }
       allLines.addAll(lines);
-      FileUtils.write(new File(testMeta.getDir(), "file"+file), StringUtils.join(lines, '\n'));
+      FileUtils.write(new File(testMeta.getDir(), "file" + file), StringUtils.join(lines, '\n'));
     }
 
     Thread.sleep(10);
@@ -104,15 +117,16 @@ public class AbstractFileInputOperatorFailureHandlingTest
     TestFileInputOperator oper = new TestFileInputOperator();
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.getDir());
     oper.getScanner().setFilePatternRegexp(".*file[\\d]");
 
-    oper.setup(new OperatorContextTestHelper.TestIdOperatorContext(1, new Attribute.AttributeMap.DefaultAttributeMap()));
-    for (long wid=0; wid<1000; wid++) {
+    oper.setup(
+        new OperatorContextTestHelper.TestIdOperatorContext(1, new Attribute.AttributeMap.DefaultAttributeMap()));
+    for (long wid = 0; wid < 1000; wid++) {
       oper.beginWindow(wid);
       oper.emitTuples();
       oper.endWindow();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
index 3a8661c..ea16185 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
@@ -86,34 +86,35 @@ public class AbstractFileInputOperatorTest
     }
   }
 
-  @Rule public TestMeta testMeta = new TestMeta();
+  @Rule
+  public TestMeta testMeta = new TestMeta();
   
   @Test
   public void testSinglePartiton() throws Exception
   {
     FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
     HashSet<String> allLines = Sets.newHashSet();
-    for (int file=0; file<2; file++) {
+    for (int file = 0; file < 2; file++) {
       HashSet<String> lines = Sets.newHashSet();
-      for (int line=0; line<2; line++) {
-        lines.add("f"+file+"l"+line);
+      for (int line = 0; line < 2; line++) {
+        lines.add("f" + file + "l" + line);
       }
       allLines.addAll(lines);
-      FileUtils.write(new File(testMeta.dir, "file"+file), StringUtils.join(lines, '\n'));
+      FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
     }
 
     LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
     oper.getScanner().setFilePatternRegexp(".*file[\\d]");
 
     oper.setup(testMeta.context);
-    for (long wid=0; wid<3; wid++) {
+    for (long wid = 0; wid < 3; wid++) {
       oper.beginWindow(wid);
       oper.emitTuples();
       oper.endWindow();
@@ -133,8 +134,8 @@ public class AbstractFileInputOperatorTest
 
     Path path = new Path(new File(testMeta.dir).getAbsolutePath());
     FileContext.getLocalFSFileContext().delete(path, true);
-    for (int file=0; file<4; file++) {
-      FileUtils.write(new File(testMeta.dir, "partition00"+file), "");
+    for (int file = 0; file < 4; file++) {
+      FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
     }
 
     FileSystem fs = FileSystem.get(FileContext.getLocalFSFileContext().getDefaultFileSystem().getUri(), new Configuration());
@@ -158,13 +159,14 @@ public class AbstractFileInputOperatorTest
 
     Path path = new Path(new File(testMeta.dir).getAbsolutePath());
     FileContext.getLocalFSFileContext().delete(path, true);
-    for (int file=0; file<4; file++) {
-      FileUtils.write(new File(testMeta.dir, "partition00"+file), "");
+    for (int file = 0; file < 4; file++) {
+      FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
     }
 
     List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
     partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
-    Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions, new PartitioningContextImpl(null, 2));
+    Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
+        new PartitioningContextImpl(null, 2));
     Assert.assertEquals(2, newPartitions.size());
     Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called
 
@@ -202,20 +204,20 @@ public class AbstractFileInputOperatorTest
     Path path = new Path(new File(testMeta.dir).getAbsolutePath());
     FileContext.getLocalFSFileContext().delete(path, true);
     int file;
-    for (file=0; file<4; file++) {
-      FileUtils.write(new File(testMeta.dir, "partition00"+file), "a\nb\nc\n");
+    for (file = 0; file < 4; file++) {
+      FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
     }
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     int wid = 0;
 
     // Read all records to populate processedList in operator.
     oper.setup(testMeta.context);
-    for(int i = 0; i < 10; i++) {
+    for (int i = 0; i < 10; i++) {
       oper.beginWindow(wid);
       oper.emitTuples();
       oper.endWindow();
@@ -233,7 +235,7 @@ public class AbstractFileInputOperatorTest
     partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
     // incremental capacity controlled partitionCount property
     Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = initialState.definePartitions(partitions,
-      new PartitioningContextImpl(null, 0));
+        new PartitioningContextImpl(null, 0));
     Assert.assertEquals(2, newPartitions.size());
     Assert.assertEquals(1, initialState.getCurrentPartitions());
     Map<Integer, Partition<AbstractFileInputOperator<String>>> m = Maps.newHashMap();
@@ -253,8 +255,8 @@ public class AbstractFileInputOperatorTest
     }
 
     sink.clear();
-    for(int i = 0; i < 10; i++) {
-      for(AbstractFileInputOperator<String> o : opers) {
+    for (int i = 0; i < 10; i++) {
+      for (AbstractFileInputOperator<String> o : opers) {
         o.beginWindow(wid);
         o.emitTuples();
         o.endWindow();
@@ -266,12 +268,12 @@ public class AbstractFileInputOperatorTest
     Assert.assertEquals("No new tuples read ", 0, sink.collectedTuples.size());
 
     // Add four new files with 3 records each.
-    for (; file<8; file++) {
-      FileUtils.write(new File(testMeta.dir, "partition00"+file), "a\nb\nc\n");
+    for (; file < 8; file++) {
+      FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
     }
 
-    for(int i = 0; i < 10; i++) {
-      for(AbstractFileInputOperator<String> o : opers) {
+    for (int i = 0; i < 10; i++) {
+      for (AbstractFileInputOperator<String> o : opers) {
         o.beginWindow(wid);
         o.emitTuples();
         o.endWindow();
@@ -306,20 +308,20 @@ public class AbstractFileInputOperatorTest
     Path path = new Path(new File(testMeta.dir).getAbsolutePath());
     FileContext.getLocalFSFileContext().delete(path, true);
     int file;
-    for (file=0; file<4; file++) {
-      FileUtils.write(new File(testMeta.dir, "partition00"+file), "a\nb\nc\n");
+    for (file = 0; file < 4; file++) {
+      FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
     }
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     int wid = 0;
 
     //Read some records
     oper.setup(testMeta.context);
-    for(int i = 0; i < 5; i++) {
+    for (int i = 0; i < 5; i++) {
       oper.beginWindow(wid);
       oper.emitTuples();
       oper.endWindow();
@@ -357,8 +359,8 @@ public class AbstractFileInputOperatorTest
     }
 
     sink.clear();
-    for(int i = 0; i < 10; i++) {
-      for(AbstractFileInputOperator<String> o : opers) {
+    for (int i = 0; i < 10; i++) {
+      for (AbstractFileInputOperator<String> o : opers) {
         o.beginWindow(wid);
         o.emitTuples();
         o.endWindow();
@@ -391,20 +393,20 @@ public class AbstractFileInputOperatorTest
     Path path = new Path(new File(testMeta.dir).getAbsolutePath());
     FileContext.getLocalFSFileContext().delete(path, true);
     int file;
-    for (file=0; file<4; file++) {
-      FileUtils.write(new File(testMeta.dir, "partition00"+file), "a\nb\nc\n");
+    for (file = 0; file < 4; file++) {
+      FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
     }
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
-    @SuppressWarnings({ "unchecked", "rawtypes" })
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     int wid = 0;
 
     //Read some records
     oper.setup(testMeta.context);
-    for(int i = 0; i < 5; i++) {
+    for (int i = 0; i < 5; i++) {
       oper.beginWindow(wid);
       oper.emitTuples();
       oper.endWindow();
@@ -442,8 +444,8 @@ public class AbstractFileInputOperatorTest
     }
 
     sink.clear();
-    for(int i = 0; i < 10; i++) {
-      for(AbstractFileInputOperator<String> o : opers) {
+    for (int i = 0; i < 10; i++) {
+      for (AbstractFileInputOperator<String> o : opers) {
         o.beginWindow(wid);
         o.emitTuples();
         o.endWindow();
@@ -475,7 +477,7 @@ public class AbstractFileInputOperatorTest
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
     @SuppressWarnings({ "unchecked", "rawtypes" })
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
@@ -510,7 +512,7 @@ public class AbstractFileInputOperatorTest
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
     @SuppressWarnings({"unchecked", "rawtypes"})
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
@@ -545,7 +547,7 @@ public class AbstractFileInputOperatorTest
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
     @SuppressWarnings({"unchecked", "rawtypes"})
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
@@ -581,7 +583,7 @@ public class AbstractFileInputOperatorTest
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
     @SuppressWarnings({"unchecked", "rawtypes"})
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
@@ -713,7 +715,7 @@ public class AbstractFileInputOperatorTest
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
     @SuppressWarnings({"unchecked", "rawtypes"})
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
@@ -775,7 +777,7 @@ public class AbstractFileInputOperatorTest
 
     CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
     @SuppressWarnings({"unchecked", "rawtypes"})
-    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
+    CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
     oper.output.setSink(sink);
 
     oper.setDirectory(testMeta.dir);
@@ -837,7 +839,7 @@ public class AbstractFileInputOperatorTest
 
     List<TestStorageManager> storageManagers = Lists.newLinkedList();
     for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
-      storageManagers.add((TestStorageManager) p.getPartitionedInstance().idempotentStorageManager);
+      storageManagers.add((TestStorageManager)p.getPartitionedInstance().idempotentStorageManager);
     }
     Assert.assertEquals("count of storage managers", 2, storageManagers.size());
 


[06/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
index 67518ae..0fff870 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
@@ -18,21 +18,33 @@
  */
 package com.datatorrent.lib.io.fs;
 
-import java.io.*;
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.FilterOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
 import java.security.NoSuchAlgorithmException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+import java.util.TreeSet;
 import java.util.zip.GZIPInputStream;
 
 import javax.annotation.Nonnull;
-import javax.crypto.*;
+import javax.crypto.Cipher;
+import javax.crypto.CipherInputStream;
+import javax.crypto.CipherOutputStream;
+import javax.crypto.KeyGenerator;
+import javax.crypto.SecretKey;
 import javax.crypto.spec.IvParameterSpec;
 import javax.validation.ConstraintViolationException;
 
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-import com.google.common.io.LimitInputStream;
-
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -44,13 +56,20 @@ import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 
+import com.esotericsoftware.kryo.Kryo;
+import com.esotericsoftware.kryo.io.Input;
+import com.esotericsoftware.kryo.io.Output;
+import com.google.common.io.LimitInputStream;
+
+import com.datatorrent.api.Attribute;
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.LocalMode;
+import com.datatorrent.api.StreamingApplication;
 import com.datatorrent.lib.helper.OperatorContextTestHelper;
 import com.datatorrent.lib.testbench.RandomWordGenerator;
 import com.datatorrent.lib.util.TestUtils;
 import com.datatorrent.lib.util.TestUtils.TestInfo;
-
-import com.datatorrent.api.*;
-
 import com.datatorrent.netlet.util.DTThrowable;
 
 public class AbstractFileOutputOperatorTest
@@ -61,7 +80,8 @@ public class AbstractFileOutputOperatorTest
   private static final String EVEN_FILE = "even.txt";
   private static final String ODD_FILE = "odd.txt";
 
-  @Rule public FSTestWatcher testMeta = new FSTestWatcher();
+  @Rule
+  public FSTestWatcher testMeta = new FSTestWatcher();
 
   public static class FSTestWatcher extends TestInfo
   {
@@ -107,12 +127,9 @@ public class AbstractFileOutputOperatorTest
     @Override
     protected String getFileName(Integer tuple)
     {
-      if(tuple % 2 == 0)
-      {
+      if (tuple % 2 == 0) {
         return EVEN_FILE;
-      }
-      else
-      {
+      } else {
         return ODD_FILE;
       }
     }
@@ -185,9 +202,7 @@ public class AbstractFileOutputOperatorTest
     private final Long maxLength;
     private final AbstractFileOutputOperator<byte[]> fsWriter;
 
-    ValidationTestApp(File testDir,
-                      Long maxLength,
-                      AbstractFileOutputOperator<byte[]> fsWriter)
+    ValidationTestApp(File testDir, Long maxLength, AbstractFileOutputOperator<byte[]> fsWriter)
     {
       this.testDir = testDir;
       this.maxLength = maxLength;
@@ -202,17 +217,14 @@ public class AbstractFileOutputOperatorTest
 
       dag.addOperator("random", randomWordGenerator);
 
-      if(maxLength != null) {
+      if (maxLength != null) {
         fsWriter.setMaxLength(maxLength);
       }
 
       fsWriter.setFilePath(testDir.getPath());
-      dag.addOperator("fswriter",
-                      fsWriter);
+      dag.addOperator("fswriter", fsWriter);
 
-      dag.addStream("fswriterstream",
-                    randomWordGenerator.output,
-                    fsWriter.input);
+      dag.addStream("fswriterstream", randomWordGenerator.output, fsWriter.input);
     }
   }
 
@@ -253,8 +265,7 @@ public class AbstractFileOutputOperatorTest
    * @param writer The writer to restore state into.
    */
   @SuppressWarnings({"unchecked", "rawtypes"})
-  public static void restoreCheckPoint(AbstractFileOutputOperator checkPointWriter,
-                                       AbstractFileOutputOperator writer)
+  public static void restoreCheckPoint(AbstractFileOutputOperator checkPointWriter, AbstractFileOutputOperator writer)
   {
     writer.counts = checkPointWriter.counts;
     writer.endOffsets = checkPointWriter.endOffsets;
@@ -271,7 +282,7 @@ public class AbstractFileOutputOperatorTest
 
   public static void checkOutput(int fileCount, String baseFilePath, String expectedOutput)
   {
-    if(fileCount >= 0) {
+    if (fileCount >= 0) {
       baseFilePath += "." + fileCount;
     }
 
@@ -281,15 +292,11 @@ public class AbstractFileOutputOperatorTest
 
     try {
       fileContents = FileUtils.readFileToString(file);
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       DTThrowable.rethrow(ex);
     }
 
-    Assert.assertEquals("Single file " + fileCount +
-                        " output contents",
-                        expectedOutput,
-                        fileContents);
+    Assert.assertEquals("Single file " + fileCount + " output contents", expectedOutput, fileContents);
   }
 
   @Test
@@ -301,14 +308,9 @@ public class AbstractFileOutputOperatorTest
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n" +
-                             "3\n";
+    String correctContents = "0\n" + "1\n" + "2\n" + "3\n";
 
-    checkOutput(-1,
-                singleFileName,
-                correctContents);
+    checkOutput(-1, singleFileName, correctContents);
   }
 
   @Test
@@ -322,10 +324,7 @@ public class AbstractFileOutputOperatorTest
   @Test
   public void testSingleFileCompletedWriteOverwriteInitial() throws IOException
   {
-    populateFile(SINGLE_FILE,
-                 "0\n" +
-                 "1\n" +
-                 "2\n");
+    populateFile(SINGLE_FILE, "0\n" + "1\n" + "2\n");
 
     SingleHDFSExactlyOnceWriter writer = new SingleHDFSExactlyOnceWriter();
 
@@ -333,14 +332,8 @@ public class AbstractFileOutputOperatorTest
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n" +
-                             "3\n";
-
-    checkOutput(-1,
-                singleFileName,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n" + "3\n";
+    checkOutput(-1, singleFileName, correctContents);
   }
 
   @Test
@@ -380,16 +373,8 @@ public class AbstractFileOutputOperatorTest
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "4\n" +
-                             "5\n" +
-                             "6\n" +
-                             "7\n";
-
-    checkOutput(-1,
-                singleFileName,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "4\n" + "5\n" + "6\n" + "7\n";
+    checkOutput(-1, singleFileName, correctContents);
   }
 
   @Test
@@ -403,25 +388,14 @@ public class AbstractFileOutputOperatorTest
   public void testSingleFileFailedWriteOverwriteInitial() throws IOException
   {
     SingleHDFSExactlyOnceWriter writer = new SingleHDFSExactlyOnceWriter();
-    populateFile(SINGLE_FILE,
-                 "0\n" +
-                 "1\n" +
-                 "2\n");
+    populateFile(SINGLE_FILE, "0\n" + "1\n" + "2\n");
 
     testSingleFileFailedWriteHelper(writer);
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "4\n" +
-                             "5\n" +
-                             "6\n" +
-                             "7\n";
-
-    checkOutput(-1,
-                singleFileName,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "4\n" + "5\n" + "6\n" + "7\n";
+    checkOutput(-1, singleFileName, correctContents);
   }
 
   @Test
@@ -478,25 +452,13 @@ public class AbstractFileOutputOperatorTest
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n" +
-                             "6\n";
-
-    checkOutput(-1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "2\n" + "4\n" + "6\n";
+    checkOutput(-1, evenFileName, correctContents);
 
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n" +
-                      "7\n";
-
-    checkOutput(-1,
-                oddFileName,
-                correctContents);
+    correctContents = "1\n" + "3\n" + "5\n" + "7\n";
+    checkOutput(-1, oddFileName, correctContents);
   }
 
   @Test
@@ -517,25 +479,13 @@ public class AbstractFileOutputOperatorTest
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n" +
-                             "6\n";
-
-    checkOutput(-1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "2\n" + "4\n" + "6\n";
+    checkOutput(-1, evenFileName, correctContents);
 
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n" +
-                      "7\n";
-
-    checkOutput(-1,
-                oddFileName,
-                correctContents);
+    correctContents = "1\n" + "3\n" + "5\n" + "7\n";
+    checkOutput(-1, oddFileName, correctContents);
   }
 
   @Test
@@ -549,13 +499,8 @@ public class AbstractFileOutputOperatorTest
   @Test
   public void testMultiFileCompletedWriteOverwriteInitial() throws IOException
   {
-    populateFile(EVEN_FILE,
-                 "0\n" +
-                 "2\n");
-
-    populateFile(ODD_FILE,
-                 "1\n" +
-                 "3\n");
+    populateFile(EVEN_FILE, "0\n" + "2\n");
+    populateFile(ODD_FILE, "1\n" + "3\n");
 
     EvenOddHDFSExactlyOnceWriter writer = new EvenOddHDFSExactlyOnceWriter();
 
@@ -563,25 +508,13 @@ public class AbstractFileOutputOperatorTest
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n" +
-                             "6\n";
-
-    checkOutput(-1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "2\n" + "4\n" + "6\n";
+    checkOutput(-1, evenFileName, correctContents);
 
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n" +
-                      "7\n";
-
-    checkOutput(-1,
-                oddFileName,
-                correctContents);
+    correctContents = "1\n" + "3\n" + "5\n" + "7\n";
+    checkOutput(-1, oddFileName, correctContents);
   }
 
   @Test
@@ -594,13 +527,8 @@ public class AbstractFileOutputOperatorTest
   @Test
   public void testMultiFileCompletedWriteOverwriteCache1Initial() throws IOException
   {
-    populateFile(EVEN_FILE,
-                 "0\n" +
-                 "2\n");
-
-    populateFile(ODD_FILE,
-                 "1\n" +
-                 "3\n");
+    populateFile(EVEN_FILE, "0\n" + "2\n");
+    populateFile(ODD_FILE, "1\n" + "3\n");
 
     EvenOddHDFSExactlyOnceWriter writer = new EvenOddHDFSExactlyOnceWriter();
     writer.setMaxOpenFiles(1);
@@ -609,25 +537,13 @@ public class AbstractFileOutputOperatorTest
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n" +
-                             "6\n";
-
-    checkOutput(-1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "2\n" + "4\n" + "6\n";
+    checkOutput(-1, evenFileName, correctContents);
 
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n" +
-                      "7\n";
-
-    checkOutput(-1,
-                oddFileName,
-                correctContents);
+    correctContents = "1\n" + "3\n" + "5\n" + "7\n";
+    checkOutput(-1, oddFileName, correctContents);
   }
 
   @Test
@@ -699,25 +615,13 @@ public class AbstractFileOutputOperatorTest
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "6\n" +
-                             "8\n";
-
-    checkOutput(-1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "2\n" + "6\n" + "8\n";
+    checkOutput(-1, evenFileName, correctContents);
 
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "7\n" +
-                      "9\n";
-
-    checkOutput(-1,
-      oddFileName,
-      correctContents);
+    correctContents = "1\n" + "3\n" + "7\n" + "9\n";
+    checkOutput(-1, oddFileName, correctContents);
   }
 
   @Test
@@ -737,25 +641,13 @@ public class AbstractFileOutputOperatorTest
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "6\n" +
-                             "8\n";
-
-    checkOutput(-1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "2\n" + "6\n" + "8\n";
+    checkOutput(-1, evenFileName, correctContents);
 
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "7\n" +
-                      "9\n";
-
-    checkOutput(-1,
-                oddFileName,
-                correctContents);
+    correctContents = "1\n" + "3\n" + "7\n" + "9\n";
+    checkOutput(-1, oddFileName, correctContents);
   }
 
   @Test
@@ -789,8 +681,7 @@ public class AbstractFileOutputOperatorTest
     writer.endWindow();
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testMeta.testOperatorContext);
 
     writer.beginWindow(2);
@@ -813,23 +704,13 @@ public class AbstractFileOutputOperatorTest
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n";
-
-    checkOutput(0,
-                singleFileName,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n";
+    checkOutput(0, singleFileName, correctContents);
 
     //Rolling file 1
 
-    correctContents = "3\n" +
-                      "4\n" +
-                      "5\n";
-
-    checkOutput(1,
-      singleFileName,
-      correctContents);
+    correctContents = "3\n" + "4\n" + "5\n";
+    checkOutput(1, singleFileName, correctContents);
   }
 
   @Test
@@ -842,47 +723,23 @@ public class AbstractFileOutputOperatorTest
   @Test
   public void testSingleRollingFileCompletedWriteOverwriteInitial() throws IOException
   {
-    populateFile(SINGLE_FILE + ".0",
-                 "0\n" +
-                 "1\n" +
-                 "2\n");
-
-    populateFile(SINGLE_FILE + ".1",
-                 "0\n" +
-                 "1\n" +
-                 "2\n");
-
-
-    populateFile(SINGLE_FILE + ".2",
-                 "0\n" +
-                 "1\n" +
-                 "2\n");
+    populateFile(SINGLE_FILE + ".0", "0\n" + "1\n" + "2\n");
+    populateFile(SINGLE_FILE + ".1", "0\n" + "1\n" + "2\n");
+    populateFile(SINGLE_FILE + ".2", "0\n" + "1\n" + "2\n");
 
     SingleHDFSExactlyOnceWriter writer = new SingleHDFSExactlyOnceWriter();
 
     testSingleRollingFileCompletedWriteHelper(writer);
 
     //Rolling file 0
-
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n";
-
-    checkOutput(0,
-                singleFileName,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n";
+    checkOutput(0, singleFileName, correctContents);
 
     //Rolling file 1
-
-    correctContents = "3\n" +
-                      "4\n" +
-                      "5\n";
-
-    checkOutput(1,
-                singleFileName,
-                correctContents);
+    correctContents = "3\n" + "4\n" + "5\n";
+    checkOutput(1, singleFileName, correctContents);
   }
 
   @Test
@@ -924,36 +781,18 @@ public class AbstractFileOutputOperatorTest
     testSingleRollingFileFailedWriteHelper(writer);
 
     //Rolling file 0
-
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n";
-
-    checkOutput(0,
-                singleFileName,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n";
+    checkOutput(0, singleFileName, correctContents);
 
     //Rolling file 1
-
-    correctContents = "3\n" +
-                      "4\n" +
-                      "5\n";
-
-    checkOutput(1,
-                singleFileName,
-                correctContents);
+    correctContents = "3\n" + "4\n" + "5\n";
+    checkOutput(1, singleFileName, correctContents);
 
     //Rolling file 2
-
-    correctContents = "6\n" +
-                      "7\n" +
-                      "8\n";
-
-    checkOutput(2,
-                singleFileName,
-                correctContents);
+    correctContents = "6\n" + "7\n" + "8\n";
+    checkOutput(2, singleFileName, correctContents);
   }
 
   @Test
@@ -984,8 +823,7 @@ public class AbstractFileOutputOperatorTest
 
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testMeta.testOperatorContext);
 
     writer.beginWindow(1);
@@ -1033,8 +871,7 @@ public class AbstractFileOutputOperatorTest
     writer.input.put(5);
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     LOG.debug("Checkpoint endOffsets={}", checkPointWriter.endOffsets);
     writer.setup(testMeta.testOperatorContext);
 
@@ -1057,22 +894,12 @@ public class AbstractFileOutputOperatorTest
     String singleFilePath = testMeta.getDir() + File.separator + SINGLE_FILE;
 
     //Rolling file 0
-
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n";
-    checkOutput(0,
-                singleFilePath,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n";
+    checkOutput(0, singleFilePath, correctContents);
 
     //Rolling file 1
-
-    correctContents = "3\n" +
-                      "4\n";
-
-    checkOutput(1,
-                singleFilePath,
-                correctContents);
+    correctContents = "3\n" + "4\n";
+    checkOutput(1, singleFilePath, correctContents);
   }
 
   @Test
@@ -1173,44 +1000,21 @@ public class AbstractFileOutputOperatorTest
     writer.committed(1);
 
     //Even file
-
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
+    String correctContents = "0\n" + "2\n" + "4\n";
+    checkOutput(0, evenFileName, correctContents);
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n";
-
-    checkOutput(0,
-                evenFileName,
-                correctContents);
-
-    correctContents = "6\n" +
-                      "8\n" +
-                      "6\n";
-
-    checkOutput(1,
-                evenFileName,
-                correctContents);
+    correctContents = "6\n" + "8\n" + "6\n";
+    checkOutput(1, evenFileName, correctContents);
 
     //Odd file
-
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
+    correctContents = "1\n" + "3\n" + "5\n";
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n";
-
-    checkOutput(0,
-                oddFileName,
-                correctContents);
-
-    correctContents = "7\n" +
-                      "9\n" +
-                      "7\n";
+    checkOutput(0, oddFileName, correctContents);
 
-    checkOutput(1,
-                oddFileName,
-                correctContents);
+    correctContents = "7\n" + "9\n" + "7\n";
+    checkOutput(1, oddFileName, correctContents);
   }
 
   private void testMultiRollingFileCompletedWriteHelper(EvenOddHDFSExactlyOnceWriter writer)
@@ -1243,42 +1047,19 @@ public class AbstractFileOutputOperatorTest
     //Even file
 
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
+    String correctContents = "0\n" + "2\n" + "4\n";
+    checkOutput(0, evenFileName, correctContents);
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n";
-
-    checkOutput(0,
-                evenFileName,
-                correctContents);
-
-    correctContents = "6\n" +
-                      "8\n" +
-                      "6\n";
-
-    checkOutput(1,
-                evenFileName,
-                correctContents);
+    correctContents = "6\n" + "8\n" + "6\n";
+    checkOutput(1, evenFileName, correctContents);
 
     //Odd file
-
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
+    correctContents = "1\n" + "3\n" + "5\n";
+    checkOutput(0, oddFileName, correctContents);
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n";
-
-    checkOutput(0,
-                oddFileName,
-                correctContents);
-
-    correctContents = "7\n" +
-                      "9\n" +
-                      "7\n";
-
-    checkOutput(1,
-                oddFileName,
-                correctContents);
+    correctContents = "7\n" + "9\n" + "7\n";
+    checkOutput(1, oddFileName, correctContents);
   }
 
   @Test
@@ -1317,44 +1098,20 @@ public class AbstractFileOutputOperatorTest
     testMultiRollingFileFailedWriteHelper(writer);
 
     //Even file
-
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
+    String correctContents = "0\n" + "2\n" + "4\n";
+    checkOutput(0, evenFileName, correctContents);
 
-    String correctContents = "0\n" +
-                             "2\n" +
-                             "4\n";
-
-    checkOutput(0,
-                evenFileName,
-                correctContents);
-
-    correctContents = "6\n" +
-                      "8\n" +
-                      "6\n";
-
-    checkOutput(1,
-                evenFileName,
-                correctContents);
+    correctContents = "6\n" + "8\n" + "6\n";
+    checkOutput(1, evenFileName, correctContents);
 
     //Odd file
-
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
+    correctContents = "1\n" + "3\n" + "5\n";
+    checkOutput(0, oddFileName, correctContents);
 
-    correctContents = "1\n" +
-                      "3\n" +
-                      "5\n";
-
-    checkOutput(0,
-                oddFileName,
-                correctContents);
-
-    correctContents = "7\n" +
-                      "9\n" +
-                      "7\n";
-
-    checkOutput(1,
-                oddFileName,
-                correctContents);
+    correctContents = "7\n" + "9\n" + "7\n";
+    checkOutput(1, oddFileName, correctContents);
   }
 
   private void testMultiRollingFileFailedWriteHelper(EvenOddHDFSExactlyOnceWriter writer)
@@ -1378,8 +1135,7 @@ public class AbstractFileOutputOperatorTest
     writer.input.put(3);
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testMeta.testOperatorContext);
 
     writer.beginWindow(1);
@@ -1428,47 +1184,25 @@ public class AbstractFileOutputOperatorTest
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    populateFile(EVEN_FILE + ".0", "0\n" +
-                                      "2\n" +
-                                      "4\n");
-    populateFile(ODD_FILE + ".0", "1\n" +
-                                     "3\n" +
-                                     "5\n");
+    populateFile(EVEN_FILE + ".0", "0\n" + "2\n" + "4\n");
+    populateFile(ODD_FILE + ".0", "1\n" + "3\n" + "5\n");
 
     testMultiRollingFileFailedWriteOverwriteHelperCache1(writer);
 
 
     //Even file
+    String correctContents = "0\n" + "4\n" + "6\n";
+    checkOutput(0, evenFileName, correctContents);
 
-    String correctContents = "0\n" +
-                             "4\n" +
-                             "6\n";
-    checkOutput(0,
-                evenFileName,
-                correctContents);
-
-    correctContents = "8\n" +
-                      "6\n" +
-                      "10\n" ;
-    checkOutput(1,
-                evenFileName,
-                correctContents);
+    correctContents = "8\n" + "6\n" + "10\n";
+    checkOutput(1, evenFileName, correctContents);
 
     //Odd file
+    correctContents = "1\n" + "5\n" + "7\n";
+    checkOutput(0, oddFileName, correctContents);
 
-    correctContents = "1\n" +
-                      "5\n" +
-                      "7\n";
-    checkOutput(0,
-                oddFileName,
-                correctContents);
-
-    correctContents = "9\n" +
-                      "7\n" +
-                      "11\n";
-    checkOutput(1,
-                oddFileName,
-                correctContents);
+    correctContents = "9\n" + "7\n" + "11\n";
+    checkOutput(1, oddFileName, correctContents);
   }
 
   @Test
@@ -1498,8 +1232,7 @@ public class AbstractFileOutputOperatorTest
     writer.input.put(3);
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testMeta.testOperatorContext);
 
     writer.beginWindow(1);
@@ -1525,12 +1258,8 @@ public class AbstractFileOutputOperatorTest
     String evenFileName = testMeta.getDir() + File.separator + EVEN_FILE;
     String oddFileName = testMeta.getDir() + File.separator + ODD_FILE;
 
-    populateFile(EVEN_FILE + ".0", "0\n" +
-                                      "2\n" +
-                                      "4\n");
-    populateFile(ODD_FILE + ".0", "1\n" +
-                                     "3\n" +
-                                     "5\n");
+    populateFile(EVEN_FILE + ".0", "0\n" + "2\n" + "4\n");
+    populateFile(ODD_FILE + ".0", "1\n" + "3\n" + "5\n");
 
     File meta = new File(testMeta.getDir());
     writer.setFilePath(meta.getAbsolutePath());
@@ -1550,8 +1279,7 @@ public class AbstractFileOutputOperatorTest
     writer.input.process(3);
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testMeta.testOperatorContext);
 
     writer.beginWindow(1);
@@ -1572,40 +1300,16 @@ public class AbstractFileOutputOperatorTest
     writer.committed(2);
 
     //Even file
-
-    String correctContents = "0\n" +
-                             "4\n" +
-                             "6\n";
-
-    checkOutput(0,
-                evenFileName,
-                correctContents);
-
-    correctContents = "8\n" +
-                      "6\n" +
-                      "10\n";
-
-    checkOutput(1,
-                evenFileName,
-                correctContents);
+    String correctContents = "0\n" + "4\n" + "6\n";
+    checkOutput(0, evenFileName, correctContents);
+    correctContents = "8\n" + "6\n" + "10\n";
+    checkOutput(1, evenFileName, correctContents);
 
     //Odd file
-
-    correctContents = "1\n" +
-                      "5\n" +
-                      "7\n";
-
-    checkOutput(0,
-                oddFileName,
-                correctContents);
-
-    correctContents = "9\n" +
-                      "7\n" +
-                      "11\n";
-
-    checkOutput(1,
-                oddFileName,
-                correctContents);
+    correctContents = "1\n" + "5\n" + "7\n";
+    checkOutput(0, oddFileName, correctContents);
+    correctContents = "9\n" + "7\n" + "11\n";
+    checkOutput(1, oddFileName, correctContents);
   }
 
   @Test
@@ -1621,43 +1325,20 @@ public class AbstractFileOutputOperatorTest
     String singleFilePath = testMeta.getDir() + File.separator + SINGLE_FILE;
 
     //Rolling file 0
-
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n";
-    checkOutput(0,
-                singleFilePath,
-                correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n";
+    checkOutput(0, singleFilePath, correctContents);
 
     //Rolling file 1
-
-    correctContents = "3\n" +
-                      "4\n" +
-                      "0\n";
-
-    checkOutput(1,
-                singleFilePath,
-                correctContents);
+    correctContents = "3\n" + "4\n" + "0\n";
+    checkOutput(1, singleFilePath, correctContents);
 
     //Rolling file 2
-
-    correctContents = "1\n" +
-                      "2\n" +
-                      "3\n";
-
-    checkOutput(2,
-                singleFilePath,
-                correctContents);
+    correctContents = "1\n" + "2\n" + "3\n";
+    checkOutput(2, singleFilePath, correctContents);
 
     //Rolling file 3
-
-    correctContents = "4\n" +
-                      "5\n" +
-                      "6\n";
-
-    checkOutput(3,
-                singleFilePath,
-                correctContents);
+    correctContents = "4\n" + "5\n" + "6\n";
+    checkOutput(3, singleFilePath, correctContents);
   }
 
   @Test
@@ -1696,8 +1377,7 @@ public class AbstractFileOutputOperatorTest
     writer.endWindow();
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testMeta.testOperatorContext);
 
     writer.beginWindow(1);
@@ -1718,9 +1398,8 @@ public class AbstractFileOutputOperatorTest
   @Test
   public void validateNothingWrongTest()
   {
-    ValidationTestApp validationTestApp = new ValidationTestApp(new File(testMeta.getDir()),
-                                                                null,
-                                                                new SingleHDFSByteExactlyOnceWriter());
+    ValidationTestApp validationTestApp = new ValidationTestApp(new File(testMeta.getDir()), null,
+        new SingleHDFSByteExactlyOnceWriter());
 
     LocalMode.runApp(validationTestApp, 1);
   }
@@ -1728,17 +1407,15 @@ public class AbstractFileOutputOperatorTest
   @Test
   public void validateNegativeMaxLengthTest()
   {
-    ValidationTestApp validationTestApp = new ValidationTestApp(new File(testMeta.getDir()),
-                                                                -1L,
-                                                                new SingleHDFSByteExactlyOnceWriter());
+    ValidationTestApp validationTestApp = new ValidationTestApp(new File(testMeta.getDir()), -1L,
+        new SingleHDFSByteExactlyOnceWriter());
 
     boolean error = false;
 
     try {
       LocalMode.runApp(validationTestApp, 1);
-    }
-    catch(RuntimeException e) {
-      if(e.getCause() instanceof ConstraintViolationException) {
+    } catch (RuntimeException e) {
+      if (e.getCause() instanceof ConstraintViolationException) {
         error = true;
       }
     }
@@ -1994,19 +1671,11 @@ public class AbstractFileOutputOperatorTest
         while ((line = br.readLine()) != null) {
           Assert.assertEquals("File line", eline, line);
           ++count;
-          //System.out.println("line " + line + " " + count);
           if ((count % totalRecords) == 0) {
             ++numWindows;
-            //System.out.println("numWindows " + numWindows);
             eline = "" + (startVal + numWindows * 2);
           }
         }
-        /*
-        if (count > 0) {
-          Assert.assertEquals("Event count", 1000, count);
-          ++numWindows;
-        }
-        */
         startOffset = offset;
       }
     } catch (Exception e) {
@@ -2035,7 +1704,7 @@ public class AbstractFileOutputOperatorTest
     byte[] iv = "TestParam16bytes".getBytes();
     final IvParameterSpec ivps = new IvParameterSpec(iv);
     FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream> chainStreamProvider
-            = new FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream>();
+        = new FilterStreamProvider.FilterChainStreamProvider<FilterOutputStream, OutputStream>();
     chainStreamProvider.addStreamProvider(new FilterStreamCodec.GZipFilterStreamProvider());
 
     // The filter is to keep track of the offsets to handle multi member gzip issue with openjdk
@@ -2118,8 +1787,9 @@ public class AbstractFileOutputOperatorTest
   {
 
     private CounterFilterOutputStream counterStream;
-    
-    public void init(OutputStream outputStream) {
+
+    public void init(OutputStream outputStream)
+    {
       counterStream = new CounterFilterOutputStream(outputStream);
     }
     
@@ -2140,7 +1810,8 @@ public class AbstractFileOutputOperatorTest
 
     }
     
-    public long getCounter() {
+    public long getCounter()
+    {
       if (isDoInit()) {
         return 0;
       } else {
@@ -2154,7 +1825,7 @@ public class AbstractFileOutputOperatorTest
   {
     long counter;
     int refCount;
-    
+
     public CounterFilterOutputStream(OutputStream out)
     {
       super(out);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/AbstractReconcilerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractReconcilerTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractReconcilerTest.java
index 34084f8..4913e17 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractReconcilerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractReconcilerTest.java
@@ -21,17 +21,17 @@ package com.datatorrent.lib.io.fs;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Test;
 
+import org.apache.hadoop.conf.Configuration;
+
 import com.google.common.collect.Lists;
 
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.LocalMode;
 import com.datatorrent.api.StreamingApplication;
-
 import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.datatorrent.lib.testbench.RandomWordGenerator;
 
@@ -183,4 +183,4 @@ public class AbstractReconcilerTest
     reconciler1.teardown();
   }
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
index 07c74db..e5193b6 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
@@ -18,31 +18,33 @@
  */
 package com.datatorrent.lib.io.fs;
 
-import com.datatorrent.lib.helper.OperatorContextTestHelper;
-import com.datatorrent.lib.io.fs.AbstractFileOutputOperatorTest.FSTestWatcher;
 import java.io.File;
 import java.io.IOException;
+
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.Description;
+
 import org.apache.commons.lang.mutable.MutableLong;
 import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.runner.Description;
 
 import com.google.common.collect.Maps;
 
-
+import com.datatorrent.lib.helper.OperatorContextTestHelper;
+import com.datatorrent.lib.io.fs.AbstractFileOutputOperatorTest.FSTestWatcher;
 import com.datatorrent.lib.util.TestUtils.TestInfo;
 
 public class AbstractSingleFileOutputOperatorTest
 {
   private static final String SINGLE_FILE = "single.txt";
 
-  @Rule public TestInfo testMeta = new PrivateTestWatcher();
+  @Rule
+  public TestInfo testMeta = new PrivateTestWatcher();
 
   public static OperatorContextTestHelper.TestIdOperatorContext testOperatorContext =
-                new OperatorContextTestHelper.TestIdOperatorContext(0);
+      new OperatorContextTestHelper.TestIdOperatorContext(0);
 
   private static SimpleFileOutputOperator writer;
 
@@ -96,22 +98,20 @@ public class AbstractSingleFileOutputOperatorTest
     CheckPointOutputOperator checkPointWriter = new CheckPointOutputOperator();
     checkPointWriter.counts = Maps.newHashMap();
 
-    for(String keys: writer.counts.keySet()) {
-      checkPointWriter.counts.put(keys,
-                                  new MutableLong(writer.counts.get(keys).longValue()));
+    for (String keys : writer.counts.keySet()) {
+      checkPointWriter.counts.put(keys, new MutableLong(writer.counts.get(keys).longValue()));
     }
 
     checkPointWriter.endOffsets = Maps.newHashMap();
 
-    for(String keys: writer.endOffsets.keySet()) {
+    for (String keys : writer.endOffsets.keySet()) {
       checkPointWriter.endOffsets.put(keys, new MutableLong(writer.endOffsets.get(keys).longValue()));
     }
 
     checkPointWriter.openPart = Maps.newHashMap();
 
-    for(String keys: writer.openPart.keySet()) {
-      checkPointWriter.openPart.put(keys,
-                                    new MutableInt(writer.openPart.get(keys).intValue()));
+    for (String keys : writer.openPart.keySet()) {
+      checkPointWriter.openPart.put(keys, new MutableInt(writer.openPart.get(keys).intValue()));
     }
 
     checkPointWriter.filePath = writer.filePath;
@@ -126,7 +126,7 @@ public class AbstractSingleFileOutputOperatorTest
   }
 
   private void restoreCheckPoint(CheckPointOutputOperator checkPointWriter,
-                                 AbstractSingleFileOutputOperator<Integer> writer)
+      AbstractSingleFileOutputOperator<Integer> writer)
   {
     writer.counts = checkPointWriter.counts;
     writer.endOffsets = checkPointWriter.endOffsets;
@@ -164,14 +164,8 @@ public class AbstractSingleFileOutputOperatorTest
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "2\n" +
-                             "3\n";
-
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      singleFileName,
-      correctContents);
+    String correctContents = "0\n" + "1\n" + "2\n" + "3\n";
+    AbstractFileOutputOperatorTest.checkOutput(-1, singleFileName, correctContents);
   }
 
   @Test
@@ -197,8 +191,7 @@ public class AbstractSingleFileOutputOperatorTest
 
     writer.teardown();
 
-    restoreCheckPoint(checkPointWriter,
-                      writer);
+    restoreCheckPoint(checkPointWriter, writer);
     writer.setup(testOperatorContext);
 
     writer.beginWindow(1);
@@ -215,15 +208,7 @@ public class AbstractSingleFileOutputOperatorTest
 
     String singleFileName = testMeta.getDir() + File.separator + SINGLE_FILE;
 
-    String correctContents = "0\n" +
-                             "1\n" +
-                             "4\n" +
-                             "5\n" +
-                             "6\n" +
-                             "7\n";
-
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      singleFileName,
-      correctContents);
+    String correctContents = "0\n" + "1\n" + "4\n" + "5\n" + "6\n" + "7\n";
+    AbstractFileOutputOperatorTest.checkOutput(-1, singleFileName, correctContents);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/AbstractWindowFileOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractWindowFileOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractWindowFileOutputOperatorTest.java
index 518a437..32c32f7 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractWindowFileOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractWindowFileOutputOperatorTest.java
@@ -18,18 +18,21 @@
  */
 package com.datatorrent.lib.io.fs;
 
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.Description;
+
 import com.datatorrent.lib.helper.OperatorContextTestHelper;
 import com.datatorrent.lib.io.fs.AbstractFileOutputOperatorTest.FSTestWatcher;
 import com.datatorrent.lib.util.TestUtils.TestInfo;
-import org.junit.*;
-import org.junit.runner.Description;
 
 /**
  * Functional Test for {@link AbstractWindowFileOutputOperator}
  */
 public class AbstractWindowFileOutputOperatorTest
 {
-  @Rule public TestInfo testMeta = new PrivateTestWatcher();
+  @Rule
+  public TestInfo testMeta = new PrivateTestWatcher();
 
   private static WindowFileOutputOperatorString oper;
 
@@ -48,7 +51,7 @@ public class AbstractWindowFileOutputOperatorTest
   }
 
   public static OperatorContextTestHelper.TestIdOperatorContext testOperatorContext =
-                new OperatorContextTestHelper.TestIdOperatorContext(0);
+      new OperatorContextTestHelper.TestIdOperatorContext(0);
 
   public static class WindowFileOutputOperatorString extends AbstractWindowFileOutputOperator<String>
   {
@@ -89,20 +92,11 @@ public class AbstractWindowFileOutputOperatorTest
 
     oper.teardown();
 
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      testMeta.getDir() + "/" + "0",
-      "window 0\n" +
-        "window 0\n");
+    AbstractFileOutputOperatorTest.checkOutput(-1, testMeta.getDir() + "/" + "0", "window 0\n" + "window 0\n");
 
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      testMeta.getDir() + "/" + "1",
-      "window_new 1\n" +
-        "window_new 1\n");
+    AbstractFileOutputOperatorTest.checkOutput(-1, testMeta.getDir() + "/" + "1", "window_new 1\n" + "window_new 1\n");
 
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      testMeta.getDir() + "/" + "2",
-      "window_new 2\n" +
-        "window_new 2\n");
+    AbstractFileOutputOperatorTest.checkOutput(-1, testMeta.getDir() + "/" + "2", "window_new 2\n" + "window_new 2\n");
   }
 
   @Test
@@ -136,20 +130,10 @@ public class AbstractWindowFileOutputOperatorTest
 
     oper.teardown();
 
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      testMeta.getDir() + "/" + "0",
-      "0\n" +
-        "0\n");
-
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      testMeta.getDir() + "/" + "1",
-      "1\n" +
-        "1\n" +
-        "1\n");
-
-    AbstractFileOutputOperatorTest.checkOutput(-1,
-      testMeta.getDir() + "/" + "2",
-      "2\n" +
-        "2\n");
+    AbstractFileOutputOperatorTest.checkOutput(-1, testMeta.getDir() + "/" + "0", "0\n" + "0\n");
+
+    AbstractFileOutputOperatorTest.checkOutput(-1, testMeta.getDir() + "/" + "1", "1\n" + "1\n" + "1\n");
+
+    AbstractFileOutputOperatorTest.checkOutput(-1, testMeta.getDir() + "/" + "2", "2\n" + "2\n");
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/FSInputModuleAppTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/FSInputModuleAppTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/FSInputModuleAppTest.java
index 55ee090..19ab84f 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/FSInputModuleAppTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/FSInputModuleAppTest.java
@@ -136,7 +136,7 @@ public class FSInputModuleAppTest
     for (File file : files) {
       filesData.append(FileUtils.readFileToString(file));
     }
-    Assert.assertTrue("File data doesn't contain expected text" , filesData.indexOf(expectedData) > -1);
+    Assert.assertTrue("File data doesn't contain expected text", filesData.indexOf(expectedData) > -1);
   }
 
   private static Logger LOG = LoggerFactory.getLogger(FSInputModuleAppTest.class);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
index 5c92a82..cf11a25 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
@@ -570,13 +570,13 @@ public class FileSplitterInputTest
     testMeta.fileSplitterInput.setup(testMeta.context);
 
     testMeta.fileSplitterInput.beginWindow(1);
-    ((MockScanner) testMeta.fileSplitterInput.getScanner()).semaphore.acquire();
+    ((MockScanner)testMeta.fileSplitterInput.getScanner()).semaphore.acquire();
 
     testMeta.fileSplitterInput.emitTuples();
     testMeta.fileSplitterInput.endWindow();
     Assert.assertEquals("File metadata", 15, testMeta.fileMetadataSink.collectedTuples.size());
     for (Object fileMetadata : testMeta.fileMetadataSink.collectedTuples) {
-      FileSplitterInput.FileMetadata metadata = (FileSplitterInput.FileMetadata) fileMetadata;
+      FileSplitterInput.FileMetadata metadata = (FileSplitterInput.FileMetadata)fileMetadata;
       Assert.assertTrue("path: " + metadata.getFilePath(), expectedFiles.contains(metadata.getFilePath()));
       Assert.assertNotNull("name: ", metadata.getFileName());
     }
@@ -596,11 +596,12 @@ public class FileSplitterInputTest
 
     testMeta.fileSplitterInput.setup(testMeta.context);
     testMeta.fileSplitterInput.beginWindow(1);
-    ((MockScanner) testMeta.fileSplitterInput.getScanner()).semaphore.acquire();
+    ((MockScanner)testMeta.fileSplitterInput.getScanner()).semaphore.acquire();
     testMeta.fileSplitterInput.emitTuples();
     testMeta.fileSplitterInput.endWindow();
     Assert.assertEquals("File metadata count", 1, testMeta.fileMetadataSink.collectedTuples.size());
-    Assert.assertEquals("Empty directory not copied.", emptyDir.getName(), testMeta.fileMetadataSink.collectedTuples.get(0).getFileName());
+    Assert.assertEquals("Empty directory not copied.", emptyDir.getName(),
+        testMeta.fileMetadataSink.collectedTuples.get(0).getFileName());
   }
 
   private static class MockScanner extends FileSplitterInput.TimeBasedDirectoryScanner

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
index db34c01..34ffecd 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
@@ -163,11 +163,12 @@ public class TailFsInputOperatorTest
     oper.activate(null);
     File file = new File(filePath);
     if (file.exists()) {
-      file.renameTo(new File(filePath+".bk"));      
+      file.renameTo(new File(filePath + ".bk"));
     }
     try {
       Thread.sleep(1000);
     } catch (InterruptedException e) {
+      //fixme
     }
     fstream = new FileWriter(filePath);
     out = new BufferedWriter(fstream);
@@ -213,11 +214,12 @@ public class TailFsInputOperatorTest
     oper.activate(null);
     File file = new File(filePath);
     if (file.exists()) {
-      file.renameTo(new File(filePath+".bk"));      
+      file.renameTo(new File(filePath + ".bk"));
     }
     try {
       Thread.sleep(1000);
     } catch (InterruptedException e) {
+      //fixme
     }
     fstream = new FileWriter(filePath);
     out = new BufferedWriter(fstream);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperatorTest.java
index 59a2f17..2a760ca 100644
--- a/library/src/test/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperatorTest.java
@@ -18,17 +18,12 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap;
-import com.datatorrent.api.DAG;
-import com.datatorrent.lib.helper.OperatorContextTestHelper;
-import com.datatorrent.lib.helper.OperatorContextTestHelper.TestIdOperatorContext;
-import com.datatorrent.lib.util.ActiveMQMultiTypeMessageListener;
 import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
-import org.apache.commons.io.FileUtils;
+
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -36,6 +31,14 @@ import org.junit.rules.TestWatcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.io.FileUtils;
+
+import com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap;
+import com.datatorrent.api.DAG;
+import com.datatorrent.lib.helper.OperatorContextTestHelper;
+import com.datatorrent.lib.helper.OperatorContextTestHelper.TestIdOperatorContext;
+import com.datatorrent.lib.util.ActiveMQMultiTypeMessageListener;
+
 /**
  * Test to verify JMS output operator adapter.
  */
@@ -66,8 +69,7 @@ public class JMSMultiPortOutputOperatorTest extends JMSTestBase
 
       try {
         FileUtils.deleteDirectory(new File(FSPsuedoTransactionableStore.DEFAULT_RECOVERY_DIRECTORY));
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -77,8 +79,7 @@ public class JMSMultiPortOutputOperatorTest extends JMSTestBase
     {
       try {
         FileUtils.deleteDirectory(new File(FSPsuedoTransactionableStore.DEFAULT_RECOVERY_DIRECTORY));
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/jms/JMSObjectInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/jms/JMSObjectInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/jms/JMSObjectInputOperatorTest.java
index 94bdeee..06e94c6 100644
--- a/library/src/test/java/com/datatorrent/lib/io/jms/JMSObjectInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/jms/JMSObjectInputOperatorTest.java
@@ -20,23 +20,33 @@ package com.datatorrent.lib.io.jms;
 
 import java.io.File;
 
-import javax.jms.*;
+import javax.jms.BytesMessage;
+import javax.jms.Connection;
+import javax.jms.DeliveryMode;
+import javax.jms.Destination;
+import javax.jms.JMSException;
+import javax.jms.MapMessage;
+import javax.jms.MessageProducer;
+import javax.jms.ObjectMessage;
+import javax.jms.Session;
+import javax.jms.StreamMessage;
+import javax.jms.TextMessage;
 
-import org.apache.activemq.ActiveMQConnectionFactory;
-import org.apache.activemq.command.ActiveMQQueue;
-import org.apache.commons.io.FileUtils;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestWatcher;
 import org.junit.runner.Description;
 
+import org.apache.activemq.ActiveMQConnectionFactory;
+import org.apache.activemq.command.ActiveMQQueue;
+import org.apache.commons.io.FileUtils;
+
 import com.datatorrent.api.Attribute;
 import com.datatorrent.api.Context;
-
-import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.lib.helper.OperatorContextTestHelper;
 import com.datatorrent.lib.testbench.CollectorTestSink;
+import com.datatorrent.netlet.util.DTThrowable;
 
 public class JMSObjectInputOperatorTest
 {
@@ -58,8 +68,7 @@ public class JMSObjectInputOperatorTest
       testBase = new JMSTestBase();
       try {
         testBase.beforTest();
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         throw new RuntimeException(e);
       }
       String methodName = description.getMethodName();
@@ -87,8 +96,7 @@ public class JMSObjectInputOperatorTest
         // Clean up
         session.close();
         connection.close();
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         DTThrowable.rethrow(ex);
       }
       operator.deactivate();
@@ -96,8 +104,7 @@ public class JMSObjectInputOperatorTest
       try {
         FileUtils.deleteDirectory(new File("target/" + description.getClassName()));
         testBase.afterTest();
-      }
-      catch (Exception e) {
+      } catch (Exception e) {
         DTThrowable.rethrow(e);
       }
     }
@@ -199,7 +206,7 @@ public class JMSObjectInputOperatorTest
   private void createStreamMsgs(int numMessages) throws Exception
   {
     Long value = 1013L;
-    StreamMessage message=testMeta.session.createStreamMessage();
+    StreamMessage message = testMeta.session.createStreamMessage();
     message.writeObject(value);
     for (int i = 0; i < numMessages; i++) {
       testMeta.producer.send(message);
@@ -208,10 +215,10 @@ public class JMSObjectInputOperatorTest
 
   private void createByteMsgs(int numMessages) throws Exception
   {
-    BytesMessage message=testMeta.session.createBytesMessage();
+    BytesMessage message = testMeta.session.createBytesMessage();
     for (int i = 0; i < numMessages; i++) {
       message.writeBytes(("Message: " + i).getBytes());
-      message.setIntProperty("counter",i);
+      message.setIntProperty("counter", i);
       message.setJMSCorrelationID("MyCorrelationID");
       message.setJMSReplyTo(new ActiveMQQueue("MyReplyTo"));
       message.setJMSType("MyType");
@@ -222,7 +229,7 @@ public class JMSObjectInputOperatorTest
 
   private void createObjectMsgs(int numMessages) throws Exception
   {
-    ObjectMessage message=testMeta.session.createObjectMessage();
+    ObjectMessage message = testMeta.session.createObjectMessage();
     message.setObject("Test for Object Messages");
     for (int i = 0; i < numMessages; i++) {
       testMeta.producer.send(message);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/jms/JMSOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/jms/JMSOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/jms/JMSOutputOperatorTest.java
index 4230611..6f28b19 100644
--- a/library/src/test/java/com/datatorrent/lib/io/jms/JMSOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/jms/JMSOutputOperatorTest.java
@@ -18,25 +18,31 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.DAG;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.Operator.ProcessingMode;
-import com.datatorrent.lib.helper.OperatorContextTestHelper;
-import com.datatorrent.lib.helper.OperatorContextTestHelper.TestIdOperatorContext;
-import com.datatorrent.lib.util.ActiveMQMultiTypeMessageListener;
 import java.io.File;
 import java.io.IOException;
 import java.util.Random;
+
 import javax.jms.JMSException;
 import javax.jms.Message;
-import org.apache.commons.io.FileUtils;
-import org.junit.*;
+
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
 import org.junit.rules.TestWatcher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.io.FileUtils;
+
+import com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap;
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.Operator.ProcessingMode;
+import com.datatorrent.lib.helper.OperatorContextTestHelper;
+import com.datatorrent.lib.helper.OperatorContextTestHelper.TestIdOperatorContext;
+import com.datatorrent.lib.util.ActiveMQMultiTypeMessageListener;
+
 /**
  * Test to verify JMS output operator adapter.
  */
@@ -72,8 +78,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
       try {
         FileUtils.deleteDirectory(new File(FSPsuedoTransactionableStore.DEFAULT_RECOVERY_DIRECTORY));
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -83,8 +88,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
     {
       try {
         FileUtils.deleteDirectory(new File(FSPsuedoTransactionableStore.DEFAULT_RECOVERY_DIRECTORY));
-      }
-      catch (IOException ex) {
+      } catch (IOException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -109,8 +113,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
       Message msg;
       try {
         msg = getSession().createTextMessage(tuple.toString());
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         throw new RuntimeException("Failed to create message.", ex);
       }
 
@@ -231,144 +234,87 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
   //@Ignore
   @Test
-  public void testBatch()
+  public void testBatch() throws JMSException, InterruptedException
   {
     // Setup a message listener to receive the message
     final ActiveMQMultiTypeMessageListener listener = new ActiveMQMultiTypeMessageListener();
     listener.setTopic(false);
 
-    try {
-      listener.setupConnection();
-    }
-    catch (JMSException ex) {
-      throw new RuntimeException(ex);
-    }
-
+    listener.setupConnection();
     listener.run();
 
     createOperator(false, testOperatorContext);
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
+    Thread.sleep(200);
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
+    Thread.sleep(200);
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should not be written",
-                        BATCH_SIZE + HALF_BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should not be written", BATCH_SIZE + HALF_BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(2);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
+    Thread.sleep(200);
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should not be written",
-                        2 * BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should not be written", 2 * BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.teardown();
-
     listener.closeConnection();
   }
 
-  //@Ignore
   @Test
-  public void testAtLeastOnceFullBatch()
+  public void testAtLeastOnceFullBatch() throws JMSException, InterruptedException
   {
     // Setup a message listener to receive the message
     final ActiveMQMultiTypeMessageListener listener = new ActiveMQMultiTypeMessageListener();
     listener.setTopic(false);
 
-    try {
-      listener.setupConnection();
-    }
-    catch (JMSException ex) {
-      throw new RuntimeException(ex);
-    }
-
+    listener.setupConnection();
     listener.run();
 
     createOperator(false, testOperatorContext);
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.teardown();
 
@@ -376,115 +322,65 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     outputOperator.setup(testOperatorContext);
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE, listener.receivedData.size());
 
     listener.closeConnection();
   }
 
   //@Ignore
   @Test
-  public void testAtLeastOnceHalfBatch()
+  public void testAtLeastOnceHalfBatch() throws JMSException, InterruptedException
   {
     // Setup a message listener to receive the message
     final ActiveMQMultiTypeMessageListener listener = new ActiveMQMultiTypeMessageListener();
     listener.setTopic(false);
 
-    try {
-      listener.setupConnection();
-    }
-    catch (JMSException ex) {
-      throw new RuntimeException(ex);
-    }
-
+    listener.setupConnection();
     listener.run();
 
     createOperator(false, testOperatorContext);
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
-
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.teardown();
 
@@ -492,56 +388,28 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     outputOperator.setup(testOperatorContext);
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
 
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
-
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     outputOperator.endWindow();
-
-    try {
-      Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE + HALF_BATCH_SIZE,
-                        listener.receivedData.size());
-
+    Thread.sleep(200);
+    Assert.assertEquals("Batch should be written", BATCH_SIZE + HALF_BATCH_SIZE, listener.receivedData.size());
     listener.closeConnection();
   }
 
@@ -555,8 +423,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       listener.setupConnection();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -566,9 +433,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
@@ -576,31 +441,23 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.teardown();
 
@@ -610,9 +467,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     outputOperator.beginWindow(2);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
@@ -620,13 +475,10 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE, listener.receivedData.size());
 
     listener.closeConnection();
   }
@@ -641,8 +493,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       listener.setupConnection();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -652,9 +503,7 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     outputOperator.beginWindow(0);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
@@ -662,31 +511,23 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(1);
 
-    for(int batchCounter = 0;
-        batchCounter < HALF_BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < HALF_BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.teardown();
 
@@ -696,19 +537,14 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        BATCH_SIZE,
-                        listener.receivedData.size());
+    Assert.assertEquals("Batch should be written", BATCH_SIZE, listener.receivedData.size());
 
     outputOperator.beginWindow(2);
 
-    for(int batchCounter = 0;
-        batchCounter < BATCH_SIZE;
-        batchCounter++) {
+    for (int batchCounter = 0; batchCounter < BATCH_SIZE; batchCounter++) {
       outputOperator.inputPort.put(Integer.toString(random.nextInt()));
     }
 
@@ -716,14 +552,10 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
     try {
       Thread.sleep(200);
-    }
-    catch (InterruptedException ex) {
+    } catch (InterruptedException ex) {
       throw new RuntimeException(ex);
     }
-    Assert.assertEquals("Batch should be written",
-                        2 * BATCH_SIZE,
-                        listener.receivedData.size());
-
+    Assert.assertEquals("Batch should be written", 2 * BATCH_SIZE, listener.receivedData.size());
     listener.closeConnection();
   }
 
@@ -759,15 +591,14 @@ public class JMSOutputOperatorTest extends JMSTestBase
 
       try {
         msg = getSession().createTextMessage(tuple.toString());
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         throw new RuntimeException(ex);
       }
 
       return msg;
     }
   }
-  //@Ignore
+
   @Test
   public void testJMSMultiPortOutputOperator() throws Exception
   {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/jms/JMSTestBase.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/jms/JMSTestBase.java b/library/src/test/java/com/datatorrent/lib/io/jms/JMSTestBase.java
index e179f4d..a59ca6e 100644
--- a/library/src/test/java/com/datatorrent/lib/io/jms/JMSTestBase.java
+++ b/library/src/test/java/com/datatorrent/lib/io/jms/JMSTestBase.java
@@ -27,11 +27,12 @@ import javax.jms.MessageProducer;
 import javax.jms.Session;
 import javax.jms.TextMessage;
 
+import org.junit.After;
+import org.junit.Before;
+
 import org.apache.activemq.ActiveMQConnectionFactory;
 import org.apache.activemq.broker.BrokerService;
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Before;
 
 /**
  * Base class for JMS operators test. <br/>
@@ -101,4 +102,4 @@ public class JMSTestBase
     broker.stop();
     FileUtils.deleteDirectory(new File("target/activemq-data").getAbsoluteFile());
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/io/jms/JMSTransactionableStoreTestBase.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/jms/JMSTransactionableStoreTestBase.java b/library/src/test/java/com/datatorrent/lib/io/jms/JMSTransactionableStoreTestBase.java
index 0db52bf..2064080 100644
--- a/library/src/test/java/com/datatorrent/lib/io/jms/JMSTransactionableStoreTestBase.java
+++ b/library/src/test/java/com/datatorrent/lib/io/jms/JMSTransactionableStoreTestBase.java
@@ -18,16 +18,22 @@
  */
 package com.datatorrent.lib.io.jms;
 
+import java.io.File;
+
+import javax.jms.JMSException;
+
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestWatcher;
+
+import org.apache.commons.io.FileUtils;
+
 import com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap;
 import com.datatorrent.api.DAG;
 import com.datatorrent.lib.helper.OperatorContextTestHelper.TestIdOperatorContext;
 import com.datatorrent.lib.io.jms.JMSOutputOperatorTest.JMSStringSinglePortOutputOperator;
 import com.datatorrent.lib.util.ActiveMQMultiTypeMessageListener;
-import java.io.File;
-import javax.jms.JMSException;
-import org.apache.commons.io.FileUtils;
-import org.junit.*;
-import org.junit.rules.TestWatcher;
 
 /**
  * Base testing class for testing transactionable store implementations.
@@ -82,11 +88,7 @@ public class JMSTransactionableStoreTestBase extends JMSTestBase
 
     try {
       store = storeClass.newInstance();
-    }
-    catch (InstantiationException ex) {
-      throw new RuntimeException(ex);
-    }
-    catch (IllegalAccessException ex) {
+    } catch (InstantiationException | IllegalAccessException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -110,7 +112,7 @@ public class JMSTransactionableStoreTestBase extends JMSTestBase
    */
   private void deleteOperator()
   {
-      outputOperator.teardown();
+    outputOperator.teardown();
   }
 
   //@Ignore
@@ -192,20 +194,13 @@ public class JMSTransactionableStoreTestBase extends JMSTestBase
     deleteOperator();
   }
 
-  //@Ignore
   @Test
-  public void commitTest()
+  public void commitTest() throws JMSException, InterruptedException
   {
     final ActiveMQMultiTypeMessageListener listener = new ActiveMQMultiTypeMessageListener();
     listener.setSubject(SUBJECT);
 
-    try {
-      listener.setupConnection();
-    }
-    catch (JMSException ex) {
-      throw new RuntimeException(ex);
-    }
-
+    listener.setupConnection();
     listener.run();
 
     createOperator();
@@ -213,23 +208,11 @@ public class JMSTransactionableStoreTestBase extends JMSTestBase
     store.beginTransaction();
     outputOperator.inputPort.put("a");
 
-    try {
-      Thread.sleep(500);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-
+    Thread.sleep(500);
     Assert.assertEquals(0, listener.receivedData.size());
     store.commitTransaction();
 
-    try {
-      Thread.sleep(500);
-    }
-    catch (InterruptedException ex) {
-      throw new RuntimeException(ex);
-    }
-
+    Thread.sleep(500);
     Assert.assertEquals(1, listener.receivedData.size());
 
     deleteOperator();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/join/MapTimeBasedJoinOperator.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/join/MapTimeBasedJoinOperator.java b/library/src/test/java/com/datatorrent/lib/join/MapTimeBasedJoinOperator.java
index 391b37d..3672200 100644
--- a/library/src/test/java/com/datatorrent/lib/join/MapTimeBasedJoinOperator.java
+++ b/library/src/test/java/com/datatorrent/lib/join/MapTimeBasedJoinOperator.java
@@ -56,7 +56,9 @@ public class MapTimeBasedJoinOperator
     oper.setup(context);
 
     CollectorTestSink<List<Map<String, Object>>> sink = new CollectorTestSink<List<Map<String, Object>>>();
-    @SuppressWarnings({"unchecked", "rawtypes"}) CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
+
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
     oper.outputPort.setSink(tmp);
 
     oper.beginWindow(0);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/join/POJOTimeBasedJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/join/POJOTimeBasedJoinOperatorTest.java b/library/src/test/java/com/datatorrent/lib/join/POJOTimeBasedJoinOperatorTest.java
index 6914da0..8ec722e 100644
--- a/library/src/test/java/com/datatorrent/lib/join/POJOTimeBasedJoinOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/join/POJOTimeBasedJoinOperatorTest.java
@@ -131,7 +131,8 @@ public class POJOTimeBasedJoinOperatorTest
     oper.setup(MapTimeBasedJoinOperator.context);
 
     CollectorTestSink<List<CustOrder>> sink = new CollectorTestSink<List<CustOrder>>();
-    @SuppressWarnings({"unchecked", "rawtypes"}) CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
     oper.outputPort.setSink(tmp);
 
     oper.beginWindow(0);
@@ -185,7 +186,8 @@ public class POJOTimeBasedJoinOperatorTest
     oper.setup(MapTimeBasedJoinOperator.context);
 
     CollectorTestSink<List<CustOrder>> sink = new CollectorTestSink<List<CustOrder>>();
-    @SuppressWarnings({"unchecked", "rawtypes"}) CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
     oper.outputPort.setSink(tmp);
 
     oper.beginWindow(0);
@@ -257,7 +259,8 @@ public class POJOTimeBasedJoinOperatorTest
     oper.setup(MapTimeBasedJoinOperator.context);
 
     CollectorTestSink<List<CustOrder>> sink = new CollectorTestSink<List<CustOrder>>();
-    @SuppressWarnings({"unchecked", "rawtypes"}) CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
     oper.outputPort.setSink(tmp);
 
     oper.beginWindow(0);
@@ -326,7 +329,8 @@ public class POJOTimeBasedJoinOperatorTest
     oper.setup(MapTimeBasedJoinOperator.context);
 
     CollectorTestSink<List<CustOrder>> sink = new CollectorTestSink<List<CustOrder>>();
-    @SuppressWarnings({"unchecked", "rawtypes"}) CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    CollectorTestSink<Object> tmp = (CollectorTestSink)sink;
     oper.outputPort.setSink(tmp);
 
     oper.beginWindow(0);


[15/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/AbstractReconciler.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractReconciler.java b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractReconciler.java
index 4172ed4..945c000 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractReconciler.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractReconciler.java
@@ -32,14 +32,13 @@ import org.slf4j.LoggerFactory;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Queues;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.Context;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.Operator.CheckpointListener;
 import com.datatorrent.api.Operator.IdleTimeHandler;
-
-import com.datatorrent.netlet.util.DTThrowable;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.common.util.NameableThreadFactory;
+import com.datatorrent.netlet.util.DTThrowable;
 
 /**
  * This base operator queues input tuples for each window and asynchronously processes them after the window is committed.
@@ -115,12 +114,10 @@ public abstract class AbstractReconciler<INPUT, QUEUETUPLE> extends BaseOperator
     if (execute) {
       try {
         Thread.sleep(spinningTime);
-      }
-      catch (InterruptedException ie) {
+      } catch (InterruptedException ie) {
         throw new RuntimeException(ie);
       }
-    }
-    else {
+    } else {
       logger.error("Exception: ", cause);
       DTThrowable.rethrow(cause.get());
     }
@@ -178,14 +175,14 @@ public abstract class AbstractReconciler<INPUT, QUEUETUPLE> extends BaseOperator
             processCommittedData(output);
             doneTuples.add(output);
           }
-        }
-        catch (Throwable e) {
+        } catch (Throwable e) {
           cause.set(e);
           execute = false;
         }
       }
     };
   }
+
   /**
    * The implementation class should call this method to enqueue output once input is converted to queue input.
    *
@@ -203,7 +200,7 @@ public abstract class AbstractReconciler<INPUT, QUEUETUPLE> extends BaseOperator
    *
    * @param input
    */
-  abstract protected void processTuple(INPUT input);
+  protected abstract void processTuple(INPUT input);
 
   /**
    * This method is called once the window in which queueTuple was created is committed.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/AbstractThroughputFileInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractThroughputFileInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractThroughputFileInputOperator.java
index a9604e3..0a96418 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractThroughputFileInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractThroughputFileInputOperator.java
@@ -18,15 +18,16 @@
  */
 package com.datatorrent.lib.io.fs;
 
-import com.datatorrent.api.Stats.OperatorStats;
-import com.datatorrent.lib.counters.BasicCounters;
-
 import java.util.Collection;
 
-import org.apache.commons.lang.mutable.MutableLong;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.mutable.MutableLong;
+
+import com.datatorrent.api.Stats.OperatorStats;
+import com.datatorrent.lib.counters.BasicCounters;
+
 /**
  * This is the base implementation for a file input operator, which scans a directory for files.&nbsp;
  * Files are then read and split into tuples, which are emitted.&nbsp;
@@ -134,7 +135,7 @@ public abstract class AbstractThroughputFileInputOperator<T> extends AbstractFil
     int newOperatorCount;
     int totalFileCount = 0;
 
-    for(Partition<AbstractFileInputOperator<T>> partition : partitions) {
+    for (Partition<AbstractFileInputOperator<T>> partition : partitions) {
       AbstractFileInputOperator<T> oper = partition.getPartitionedInstance();
       totalFileCount += oper.failedFiles.size();
       totalFileCount += oper.pendingFiles.size();
@@ -145,11 +146,10 @@ public abstract class AbstractThroughputFileInputOperator<T> extends AbstractFil
       }
     }
 
-    if(!isInitialParitition) {
+    if (!isInitialParitition) {
       LOG.debug("definePartitions: Total File Count: {}", totalFileCount);
       newOperatorCount = computeOperatorCount(totalFileCount);
-    }
-    else {
+    } else {
       newOperatorCount = partitionCount;
     }
 
@@ -160,13 +160,13 @@ public abstract class AbstractThroughputFileInputOperator<T> extends AbstractFil
   {
     int newOperatorCount = totalFileCount / preferredMaxPendingFilesPerOperator;
 
-    if(totalFileCount % preferredMaxPendingFilesPerOperator > 0) {
+    if (totalFileCount % preferredMaxPendingFilesPerOperator > 0) {
       newOperatorCount++;
     }
-    if(newOperatorCount > partitionCount) {
+    if (newOperatorCount > partitionCount) {
       newOperatorCount = partitionCount;
     }
-    if(newOperatorCount == 0) {
+    if (newOperatorCount == 0) {
       newOperatorCount = 1;
     }
 
@@ -179,17 +179,17 @@ public abstract class AbstractThroughputFileInputOperator<T> extends AbstractFil
   {
     BasicCounters<MutableLong> fileCounters = null;
 
-    for(OperatorStats operatorStats: batchedOperatorStats.getLastWindowedStats()) {
-      if(operatorStats.counters != null) {
-        fileCounters = (BasicCounters<MutableLong>) operatorStats.counters;
+    for (OperatorStats operatorStats : batchedOperatorStats.getLastWindowedStats()) {
+      if (operatorStats.counters != null) {
+        fileCounters = (BasicCounters<MutableLong>)operatorStats.counters;
       }
     }
 
     Response response = new Response();
 
-    if(fileCounters != null &&
-       fileCounters.getCounter(FileCounters.PENDING_FILES).longValue() > 0L ||
-       System.currentTimeMillis() - repartitionInterval <= lastRepartition) {
+    if (fileCounters != null &&
+        fileCounters.getCounter(FileCounters.PENDING_FILES).longValue() > 0L ||
+        System.currentTimeMillis() - repartitionInterval <= lastRepartition) {
       response.repartitionRequired = false;
       return response;
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/FileSplitter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FileSplitter.java b/library/src/main/java/com/datatorrent/lib/io/fs/FileSplitter.java
index 48d4ae6..69e44a5 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FileSplitter.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FileSplitter.java
@@ -21,7 +21,11 @@ package com.datatorrent.lib.io.fs;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingDeque;
@@ -33,13 +37,14 @@ import javax.annotation.Nullable;
 import javax.validation.constraints.Min;
 import javax.validation.constraints.NotNull;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.commons.lang.mutable.MutableLong;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
@@ -49,13 +54,16 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
-import com.datatorrent.api.*;
+import com.datatorrent.api.Component;
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.InputOperator;
+import com.datatorrent.api.Operator;
 import com.datatorrent.api.annotation.OperatorAnnotation;
-
-import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.lib.counters.BasicCounters;
 import com.datatorrent.lib.io.IdempotentStorageManager;
 import com.datatorrent.lib.io.block.BlockMetadata.FileBlockMetadata;
+import com.datatorrent.netlet.util.DTThrowable;
 
 /**
  * Input operator that scans a directory for files and splits a file into blocks.<br/>
@@ -129,8 +137,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
 
     try {
       fs = scanner.getFSInstance();
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       throw new RuntimeException("creating fs", e);
     }
 
@@ -138,10 +145,10 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
       blockSize = fs.getDefaultBlockSize(new Path(scanner.files.iterator().next()));
     }
 
-    if (context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID) < idempotentStorageManager.getLargestRecoveryWindow()) {
+    if (context.getValue(Context.OperatorContext.ACTIVATION_WINDOW_ID) <
+        idempotentStorageManager.getLargestRecoveryWindow()) {
       blockMetadataIterator = null;
-    }
-    else {
+    } else {
       //don't setup scanner while recovery
       scanner.setup(context);
     }
@@ -153,15 +160,12 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
   {
     try {
       scanner.teardown();
-    }
-    catch (Throwable t) {
+    } catch (Throwable t) {
       DTThrowable.rethrow(t);
-    }
-    finally {
+    } finally {
       try {
         fs.close();
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException(e);
       }
     }
@@ -181,8 +185,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
   {
     try {
       @SuppressWarnings("unchecked")
-      LinkedList<FileInfo> recoveredData = (LinkedList<FileInfo>) idempotentStorageManager.load(operatorId,
-        windowId);
+      LinkedList<FileInfo> recoveredData = (LinkedList<FileInfo>)idempotentStorageManager.load(operatorId, windowId);
       if (recoveredData == null) {
         //This could happen when there are multiple physical instances and one of them is ahead in processing windows.
         return;
@@ -193,8 +196,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
       for (FileInfo info : recoveredData) {
         if (info.directoryPath != null) {
           scanner.lastModifiedTimes.put(info.directoryPath, info.modifiedTime);
-        }
-        else { //no directory
+        } else { //no directory
           scanner.lastModifiedTimes.put(info.relativeFilePath, info.modifiedTime);
         }
 
@@ -211,8 +213,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
       if (windowId == idempotentStorageManager.getLargestRecoveryWindow()) {
         scanner.setup(context);
       }
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       throw new RuntimeException("replay", e);
     }
   }
@@ -250,8 +251,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
         if (fileInfo.lastFileOfScan) {
           break;
         }
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException("creating metadata", e);
       }
     }
@@ -263,8 +263,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
     if (currentWindowId > idempotentStorageManager.getLargestRecoveryWindow()) {
       try {
         idempotentStorageManager.save(currentWindowRecoveryState, operatorId, currentWindowId);
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException("saving recovery", e);
       }
     }
@@ -280,8 +279,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
     while (blockMetadataIterator.hasNext()) {
       if (blockCount++ < blocksThreshold) {
         this.blocksMetadataOutput.emit(blockMetadataIterator.next());
-      }
-      else {
+      } else {
         return false;
       }
     }
@@ -293,7 +291,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
    * Can be overridden for creating block metadata of a type that extends {@link FileBlockMetadata}
    */
   protected FileBlockMetadata createBlockMetadata(long pos, long lengthOfFileInBlock, int blockNumber,
-                                                  FileMetadata fileMetadata, boolean isLast)
+      FileMetadata fileMetadata, boolean isLast)
   {
     return new FileBlockMetadata(fileMetadata.getFilePath(), fileMetadata.getBlockIds()[blockNumber - 1], pos,
       lengthOfFileInBlock, isLast, blockNumber == 1 ? -1 : fileMetadata.getBlockIds()[blockNumber - 2]);
@@ -321,7 +319,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
     fileMetadata.setFileLength(status.getLen());
 
     if (!status.isDirectory()) {
-      int noOfBlocks = (int) ((status.getLen() / blockSize) + (((status.getLen() % blockSize) == 0) ? 0 : 1));
+      int noOfBlocks = (int)((status.getLen() / blockSize) + (((status.getLen() % blockSize) == 0) ? 0 : 1));
       if (fileMetadata.getDataOffset() >= status.getLen()) {
         noOfBlocks = 0;
       }
@@ -335,7 +333,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
   {
     // block ids are 32 bits of operatorId | 32 bits of sequence number
     long[] blockIds = new long[fileMetadata.getNumberOfBlocks()];
-    long longLeftSide = ((long) operatorId) << 32;
+    long longLeftSide = ((long)operatorId) << 32;
     for (int i = 0; i < fileMetadata.getNumberOfBlocks(); i++) {
       blockIds[i] = longLeftSide | sequenceNo++ & 0xFFFFFFFFL;
     }
@@ -392,8 +390,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
   {
     try {
       idempotentStorageManager.deleteUpTo(operatorId, l);
-    }
-    catch (IOException e) {
+    } catch (IOException e) {
       throw new RuntimeException(e);
     }
   }
@@ -671,8 +668,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
       }
       try {
         fs = getFSInstance();
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException("opening fs", e);
       }
       scanService.submit(this);
@@ -685,8 +681,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
       scanService.shutdownNow();
       try {
         fs.close();
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException("closing fs", e);
       }
     }
@@ -708,13 +703,11 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
               scan(new Path(afile), null);
             }
             scanComplete();
-          }
-          else {
+          } else {
             Thread.sleep(sleepMillis);
           }
         }
-      }
-      catch (Throwable throwable) {
+      } catch (Throwable throwable) {
         LOG.error("service", throwable);
         running = false;
         atomicThrowable.set(throwable);
@@ -776,29 +769,25 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
             LOG.debug("found {}", childPathStr);
 
             FileInfo info;
-            if(rootPath == null) {
-             info =parentStatus.isDirectory() ?
-                new FileInfo(parentPathStr, childPath.getName(), parentStatus.getModificationTime()) :
-                new FileInfo(null, childPathStr, parentStatus.getModificationTime());
-            }
-            else {
+            if (rootPath == null) {
+              info = parentStatus.isDirectory() ?
+                  new FileInfo(parentPathStr, childPath.getName(), parentStatus.getModificationTime()) :
+                  new FileInfo(null, childPathStr, parentStatus.getModificationTime());
+            } else {
               URI relativeChildURI = rootPath.toUri().relativize(childPath.toUri());
               info = new FileInfo(rootPath.toUri().getPath(), relativeChildURI.getPath(),
-                parentStatus.getModificationTime());
+                  parentStatus.getModificationTime());
             }
 
             discoveredFiles.add(info);
-          }
-          else {
+          } else {
             // don't look at it again
             ignoredFiles.add(childPathStr);
           }
         }
-      }
-      catch (FileNotFoundException fnf) {
+      } catch (FileNotFoundException fnf) {
         LOG.warn("Failed to list directory {}", filePath, fnf);
-      }
-      catch (IOException e) {
+      } catch (IOException e) {
         throw new RuntimeException("listing files", e);
       }
     }
@@ -813,7 +802,7 @@ public class FileSplitter implements InputOperator, Operator.CheckpointListener
      * @throws IOException
      */
     protected boolean skipFile(@SuppressWarnings("unused") @NotNull Path path, @NotNull Long modificationTime,
-                               Long lastModificationTime) throws IOException
+        Long lastModificationTime) throws IOException
     {
       return (!(lastModificationTime == null || modificationTime > lastModificationTime));
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
index 595abde..35530a3 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
@@ -44,7 +44,7 @@ public interface FilterStreamContext<F extends FilterOutputStream>
    * Base filter context that can be extended to build custom filters.
    * @param <F> The Filter output stream
    */
-  public static abstract class BaseFilterStreamContext<F extends FilterOutputStream> implements FilterStreamContext<F>
+  abstract class BaseFilterStreamContext<F extends FilterOutputStream> implements FilterStreamContext<F>
   {
     protected transient F filterStream;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
index 58b51af..75e6e5f 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
@@ -21,7 +21,12 @@ package com.datatorrent.lib.io.fs;
 import java.io.FilterOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 import com.google.common.collect.Maps;
 
@@ -37,7 +42,7 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
   
   public void reclaimFilterStreamContext(FilterStreamContext<F> filterStreamContext);
   
-  public static abstract class SimpleFilterReusableStreamProvider<F extends FilterOutputStream, S extends OutputStream> implements FilterStreamProvider<F, S>
+  abstract class SimpleFilterReusableStreamProvider<F extends FilterOutputStream, S extends OutputStream> implements FilterStreamProvider<F, S>
   {
 
     private transient Map<OutputStream, FilterStreamContext<F>> reusableContexts = Maps.newHashMap();
@@ -112,7 +117,9 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
       }
     }
 
-    private class FilterChainStreamContext extends FilterStreamContext.BaseFilterStreamContext implements FilterStreamContext {
+    private class FilterChainStreamContext extends FilterStreamContext.BaseFilterStreamContext
+        implements FilterStreamContext
+    {
       
       private List<FilterStreamContext<?>> streamContexts = new ArrayList<FilterStreamContext<?>>();
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/TailFsInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/TailFsInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/fs/TailFsInputOperator.java
index 4ac03a6..f2e9a8c 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/TailFsInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/TailFsInputOperator.java
@@ -22,10 +22,11 @@ import java.io.File;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 
-import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.io.FileUtils;
+
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
@@ -261,6 +262,7 @@ public class TailFsInputOperator implements InputOperator, ActivationListener<Op
         try {
           Thread.sleep(delay);
         } catch (InterruptedException e) {
+          //swallowing exception
         }
         --localCounter;
       }
@@ -286,7 +288,7 @@ public class TailFsInputOperator implements InputOperator, ActivationListener<Op
     }
     accessTime = System.currentTimeMillis();
     while ((ch = reader.read()) != -1) {
-      readChar = (char) ch;
+      readChar = (char)ch;
       if (readChar != delimiter) {
         sb.append(readChar);
       } else {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/fs/package-info.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/package-info.java b/library/src/main/java/com/datatorrent/lib/io/fs/package-info.java
index 2009d61..872a618 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/package-info.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/package-info.java
@@ -20,4 +20,4 @@
  * Library of input operators for writing into file streams and output operators for reading from file streams.
  * The file I/O operators interact with entities outside of DAG, and at times outside of Hadoop
  */
-package com.datatorrent.lib.io.fs;
\ No newline at end of file
+package com.datatorrent.lib.io.fs;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSOutputOperator.java
index 617b397..bac0816 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSOutputOperator.java
@@ -18,18 +18,22 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.DAG;
-import com.datatorrent.api.Operator;
-import com.google.common.collect.Lists;
 import java.io.IOException;
 import java.util.List;
+
 import javax.jms.JMSException;
 import javax.jms.Message;
 import javax.jms.MessageProducer;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Lists;
+
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DAG;
+import com.datatorrent.api.Operator;
+
 /**
  * This is the base implementation of an JMS output operator.&nbsp;
  * A concrete operator should be created from this skeleton implementation.
@@ -93,8 +97,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
 
     try {
       createConnection();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       logger.debug(ex.getLocalizedMessage());
       throw new RuntimeException(ex);
     }
@@ -103,8 +106,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
 
     try {
       store.connect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -112,7 +114,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
 
     mode = context.getValue(OperatorContext.PROCESSING_MODE);
 
-    if(mode==ProcessingMode.AT_MOST_ONCE){
+    if (mode == ProcessingMode.AT_MOST_ONCE) {
       //Batch must be cleared to avoid writing same data twice
       tupleBatch.clear();
     }
@@ -135,8 +137,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
     logger.debug("beginning teardown");
     try {
       store.disconnect();
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -161,7 +162,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
   {
     logger.debug("Ending window {}", currentWindowId);
 
-    if(store.isExactlyOnce()) {
+    if (store.isExactlyOnce()) {
       //Store committed window and data in same transaction
       if (committedWindowId < currentWindowId) {
         store.storeCommittedWindowId(appId, operatorId, currentWindowId);
@@ -170,8 +171,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
 
       flushBatch();
       store.commitTransaction();
-    }
-    else {
+    } else {
       //For transactionable stores which cannot support exactly once, At least
       //once can be insured by for storing the data and then the committed window
       //id.
@@ -194,11 +194,10 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
   {
     logger.debug("flushing batch, batch size {}", tupleBatch.size());
 
-    for (Message message: messageBatch) {
+    for (Message message : messageBatch) {
       try {
         producer.send(message);
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         throw new RuntimeException(ex);
       }
     }
@@ -215,7 +214,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
    */
   protected void sendMessage(Object data)
   {
-    if(currentWindowId <= committedWindowId) {
+    if (currentWindowId <= committedWindowId) {
       return;
     }
 
@@ -249,8 +248,7 @@ public abstract class AbstractJMSOutputOperator extends JMSBase implements Opera
       producer = null;
 
       super.cleanup();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       logger.error(null, ex);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
index c7ffed3..efda6b0 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
@@ -18,10 +18,11 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.DefaultInputPort;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.DefaultInputPort;
+
 /**
  * This is the base implementation of a single port JMS output operator.&nbsp;
  * A concrete operator should be created from this skeleton implementation.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/FSPsuedoTransactionableStore.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/FSPsuedoTransactionableStore.java b/library/src/main/java/com/datatorrent/lib/io/jms/FSPsuedoTransactionableStore.java
index 61f1eb7..31eaf18 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/FSPsuedoTransactionableStore.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/FSPsuedoTransactionableStore.java
@@ -18,15 +18,22 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.annotation.Stateless;
 import java.io.IOException;
+
 import javax.jms.JMSException;
 import javax.validation.constraints.NotNull;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+
+import com.datatorrent.api.annotation.Stateless;
+
 /**
  * This is a JMS store which stores committed window ids in a file. This is not a true
  * transactionable store because there is a chance that a failure may occur in between storing the
@@ -85,9 +92,8 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
   {
     FileSystem tempFS = FileSystem.newInstance(new Path(recoveryDirectory).toUri(), new Configuration());
 
-    if(tempFS instanceof LocalFileSystem)
-    {
-      tempFS = ((LocalFileSystem) tempFS).getRaw();
+    if (tempFS instanceof LocalFileSystem) {
+      tempFS = ((LocalFileSystem)tempFS).getRaw();
     }
 
     return tempFS;
@@ -118,12 +124,10 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
 
     try {
       //No committed window stored, return negative invalid window.
-      if(!fs.exists(recoveryPath))
-      {
+      if (!fs.exists(recoveryPath)) {
         return Stateless.WINDOW_ID;
       }
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -132,16 +136,15 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
     try {
       FileStatus[] windowFiles = fs.listStatus(recoveryPath);
 
-      for(FileStatus fileStatus: windowFiles) {
+      for (FileStatus fileStatus : windowFiles) {
         String windowString = fileStatus.getPath().getName();
         long tempWindow = Long.parseLong(windowString);
 
-        if(maxWindow < tempWindow) {
+        if (maxWindow < tempWindow) {
           maxWindow = tempWindow;
         }
       }
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -159,14 +162,13 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
       fs.create(windowPath);
       FileStatus[] windowFiles = fs.listStatus(recoveryPath);
 
-      for(FileStatus fileStatus: windowFiles) {
+      for (FileStatus fileStatus : windowFiles) {
         Path tempPath = fileStatus.getPath();
-        if(!tempPath.getName().equals(windowString)) {
+        if (!tempPath.getName().equals(windowString)) {
           fs.delete(tempPath, true);
         }
       }
-    }
-    catch (IOException ex) {
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -175,10 +177,8 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
   public void removeCommittedWindowId(String appId, int operatorId)
   {
     try {
-      fs.delete(getOperatorRecoveryPath(appId, operatorId).getParent(),
-                true);
-    }
-    catch (IOException ex) {
+      fs.delete(getOperatorRecoveryPath(appId, operatorId).getParent(), true);
+    } catch (IOException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -194,8 +194,7 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
   {
     try {
       this.getBase().getSession().commit();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -207,8 +206,7 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
   {
     try {
       this.getBase().getSession().rollback();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -249,13 +247,9 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
     return false;
   }
 
-  private Path getOperatorRecoveryPath(String appId,
-                                       int operatorId)
+  private Path getOperatorRecoveryPath(String appId, int operatorId)
   {
-    return new Path(DEFAULT_RECOVERY_DIRECTORY + "/" +
-                         appId + "/" +
-                         operatorId + "/" +
-                         COMMITTED_WINDOW_DIR);
+    return new Path(DEFAULT_RECOVERY_DIRECTORY + "/" + appId + "/" + operatorId + "/" + COMMITTED_WINDOW_DIR);
   }
 
   /**
@@ -265,14 +259,9 @@ public class FSPsuedoTransactionableStore extends JMSBaseTransactionableStore
    * @param windowId The id of the current window.
    * @return The path where the windowId is stored.
    */
-  private Path getOperatorWindowRecoveryPath(String appId,
-                                       int operatorId,
-                                       long windowId)
+  private Path getOperatorWindowRecoveryPath(String appId, int operatorId, long windowId)
   {
-    return new Path(DEFAULT_RECOVERY_DIRECTORY + "/" +
-                         appId + "/" +
-                         operatorId + "/" +
-                         COMMITTED_WINDOW_DIR + "/" +
-                         windowId);
+    return new Path(DEFAULT_RECOVERY_DIRECTORY + "/" + appId + "/" + operatorId + "/" + COMMITTED_WINDOW_DIR + "/" +
+        windowId);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java b/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
index 6db6a4d..48ed2c3 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
@@ -315,17 +315,13 @@ public class JMSBase
   {
     if ("CLIENT_ACKNOWLEDGE".equals(ackMode)) {
       return Session.CLIENT_ACKNOWLEDGE;
-    }
-    else if ("AUTO_ACKNOWLEDGE".equals(ackMode)) {
+    } else if ("AUTO_ACKNOWLEDGE".equals(ackMode)) {
       return Session.AUTO_ACKNOWLEDGE;
-    }
-    else if ("DUPS_OK_ACKNOWLEDGE".equals(ackMode)) {
+    } else if ("DUPS_OK_ACKNOWLEDGE".equals(ackMode)) {
       return Session.DUPS_OK_ACKNOWLEDGE;
-    }
-    else if ("SESSION_TRANSACTED".equals(ackMode)) {
+    } else if ("SESSION_TRANSACTED".equals(ackMode)) {
       return Session.SESSION_TRANSACTED;
-    }
-    else {
+    } else {
       return Session.CLIENT_ACKNOWLEDGE; // default
     }
   }
@@ -372,8 +368,7 @@ public class JMSBase
       BeanUtils.populate(cf, connectionFactoryProperties);
       logger.debug("creation successful.");
       return cf;
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       throw new RuntimeException("Failed to create connection factory.", e);
     }
   }
@@ -388,8 +383,7 @@ public class JMSBase
       connection.close();
       session = null;
       connection = null;
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       logger.debug(ex.getLocalizedMessage());
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperator.java
index 9caa833..3bb8cb9 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/JMSMultiPortOutputOperator.java
@@ -18,14 +18,20 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import java.io.Serializable;
 import java.util.Map;
-import javax.jms.*;
+
+import javax.jms.BytesMessage;
+import javax.jms.JMSException;
+import javax.jms.MapMessage;
+import javax.jms.Message;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+
 /**
  * @since 2.1.0
  */
@@ -108,28 +114,22 @@ public class JMSMultiPortOutputOperator extends AbstractJMSOutputOperator
     try {
       if (tuple instanceof Message) {
         return (Message)tuple;
-      }
-      else if (tuple instanceof String) {
+      } else if (tuple instanceof String) {
         return getSession().createTextMessage((String)tuple);
-      }
-      else if (tuple instanceof byte[]) {
+      } else if (tuple instanceof byte[]) {
         BytesMessage message = getSession().createBytesMessage();
         message.writeBytes((byte[])tuple);
         return message;
-      }
-      else if (tuple instanceof Map) {
+      } else if (tuple instanceof Map) {
         return createMessageForMap((Map)tuple);
-      }
-      else if (tuple instanceof Serializable) {
+      } else if (tuple instanceof Serializable) {
         return getSession().createObjectMessage((Serializable)tuple);
-      }
-      else {
+      } else {
         throw new RuntimeException("Cannot convert object of type "
-                + tuple.getClass() + "] to JMS message. Supported message "
-                + "payloads are: String, byte array, Map<String,?>, Serializable object.");
+            + tuple.getClass() + "] to JMS message. Supported message "
+            + "payloads are: String, byte array, Map<String,?>, Serializable object.");
       }
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       logger.error(ex.getLocalizedMessage());
       throw new RuntimeException(ex);
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/JMSObjectInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/JMSObjectInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/jms/JMSObjectInputOperator.java
index aa68802..0bc0c79 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/JMSObjectInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/JMSObjectInputOperator.java
@@ -18,15 +18,24 @@
  */
 package com.datatorrent.lib.io.jms;
 
-import com.datatorrent.api.DefaultOutputPort;
 import java.io.Serializable;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Map;
-import javax.jms.*;
+
+import javax.jms.BytesMessage;
+import javax.jms.JMSException;
+import javax.jms.MapMessage;
+import javax.jms.Message;
+import javax.jms.ObjectMessage;
+import javax.jms.StreamMessage;
+import javax.jms.TextMessage;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.DefaultOutputPort;
+
 /**
  * An implementation of AbstractJMSInputOperator which emits TextMessage,StreamMessage,BytesMessage,MapMessage
  * and ObjectMessage on their respective ports.
@@ -54,20 +63,15 @@ public class JMSObjectInputOperator extends AbstractJMSInputOperator<Object>
   {
     if (message instanceof TextMessage) {
       return ((TextMessage)message).getText();
-    }
-    else if (message instanceof StreamMessage) {
+    } else if (message instanceof StreamMessage) {
       return ((StreamMessage)message).readString();
-    }
-    else if (message instanceof BytesMessage) {
+    } else if (message instanceof BytesMessage) {
       return extractByteArrayFromMessage((BytesMessage)message);
-    }
-    else if (message instanceof MapMessage) {
+    } else if (message instanceof MapMessage) {
       return extractMapFromMessage((MapMessage)message);
-    }
-    else if (message instanceof ObjectMessage) {
+    } else if (message instanceof ObjectMessage) {
       return extractSerializableFromMessage((ObjectMessage)message);
-    }
-    else {
+    } else {
       return message;
     }
   }
@@ -122,19 +126,16 @@ public class JMSObjectInputOperator extends AbstractJMSInputOperator<Object>
   {
     if (outputString.isConnected()) {
       outputString.emit((String)payload);
-    }
-    else if (outputMap.isConnected()) {
+    } else if (outputMap.isConnected()) {
       outputMap.emit((Map<String, Object>)payload);
-    }
-    else if (outputBytes.isConnected()) {
+    } else if (outputBytes.isConnected()) {
       outputBytes.emit((byte[])payload);
-    }
-    else {
+    } else {
       output.emit(payload);
     }
   }
 
   @SuppressWarnings("unused")
-  private static transient final Logger logger = LoggerFactory.getLogger(JMSObjectInputOperator.class);
+  private static final transient Logger logger = LoggerFactory.getLogger(JMSObjectInputOperator.class);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/JMSTransactionableStore.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/JMSTransactionableStore.java b/library/src/main/java/com/datatorrent/lib/io/jms/JMSTransactionableStore.java
index 4c5c265..11b8447 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/JMSTransactionableStore.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/JMSTransactionableStore.java
@@ -20,7 +20,14 @@ package com.datatorrent.lib.io.jms;
 
 import java.io.IOException;
 import java.util.Enumeration;
-import javax.jms.*;
+
+import javax.jms.BytesMessage;
+import javax.jms.JMSException;
+import javax.jms.MessageConsumer;
+import javax.jms.MessageProducer;
+import javax.jms.Queue;
+import javax.jms.QueueBrowser;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -60,7 +67,7 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
     try {
 
       beginTransaction();
-      BytesMessage message = (BytesMessage) consumer.receive();
+      BytesMessage message = (BytesMessage)consumer.receive();
       logger.debug("Retrieved committed window message id {}", message.getJMSMessageID());
       long windowId = message.readLong();
 
@@ -71,8 +78,7 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
 
       logger.debug("Retrieved windowId {}", windowId);
       return windowId;
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -80,20 +86,19 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
   @Override
   public void storeCommittedWindowId(String appId, int operatorId, long windowId)
   {
-    if(!inTransaction) {
+    if (!inTransaction) {
       throw new RuntimeException("This should be called while you are in an existing transaction");
     }
 
     logger.debug("storing window appId {} operatorId {} windowId {}",
-                 appId, operatorId, windowId);
+        appId, operatorId, windowId);
     try {
       removeCommittedWindowId(appId, operatorId);
       BytesMessage bytesMessage = this.getBase().getSession().createBytesMessage();
       bytesMessage.writeLong(windowId);
       producer.send(bytesMessage);
       logger.debug("Retrieved committed window message id {}", bytesMessage.getJMSMessageID());
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -103,8 +108,7 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
   {
     try {
       consumer.receive();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -114,8 +118,7 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
   {
     logger.debug("beginning transaction");
 
-    if(inTransaction)
-    {
+    if (inTransaction) {
       throw new RuntimeException("Cannot start a transaction twice.");
     }
 
@@ -127,15 +130,13 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
   {
     logger.debug("committing transaction.");
 
-    if(!inTransaction)
-    {
+    if (!inTransaction) {
       throw new RuntimeException("Cannot commit a transaction if you are not in one.");
     }
 
     try {
       getBase().getSession().commit();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -146,12 +147,9 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
   @Override
   public void rollbackTransaction()
   {
-    try
-    {
+    try {
       getBase().getSession().rollback();
-    }
-    catch (JMSException ex)
-    {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
   }
@@ -168,12 +166,11 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
     logger.debug("Entering connect. is in transaction: {}", inTransaction);
 
     try {
-      String queueName = getQueueName(getAppId(),
-                                      getOperatorId());
+      String queueName = getQueueName(getAppId(), getOperatorId());
 
       logger.debug("Base is null: {}", getBase() == null);
 
-      if(getBase() != null) {
+      if (getBase() != null) {
         logger.debug("Session is null: {}", getBase().getSession() == null);
       }
 
@@ -184,8 +181,7 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
       try {
         Enumeration enumeration = browser.getEnumeration();
         hasStore = enumeration.hasMoreElements();
-      }
-      catch (JMSException ex) {
+      } catch (JMSException ex) {
         throw new RuntimeException(ex);
       }
 
@@ -195,15 +191,14 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
       connected = true;
       logger.debug("Connected. is in transaction: {}", inTransaction);
 
-      if(!hasStore) {
+      if (!hasStore) {
         beginTransaction();
         BytesMessage message = getBase().getSession().createBytesMessage();
         message.writeLong(-1L);
         producer.send(message);
         commitTransaction();
       }
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 
@@ -217,8 +212,7 @@ public class JMSTransactionableStore extends JMSBaseTransactionableStore
     try {
       producer.close();
       consumer.close();
-    }
-    catch (JMSException ex) {
+    } catch (JMSException ex) {
       throw new RuntimeException(ex);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/io/jms/package-info.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/package-info.java b/library/src/main/java/com/datatorrent/lib/io/jms/package-info.java
index 49768c1..ad4f163 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/package-info.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/package-info.java
@@ -20,4 +20,4 @@
  * Library of input operators for writing into jms broker and output operators for reading from jms broker.
  * The jms operators interact with entities outside of DAG, and at times outside of Hadoop
  */
-package com.datatorrent.lib.io.jms;
\ No newline at end of file
+package com.datatorrent.lib.io.jms;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseMapOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseMapOutputOperator.java b/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseMapOutputOperator.java
index 5883b98..33bdbaa 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseMapOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseMapOutputOperator.java
@@ -24,14 +24,16 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import com.datatorrent.common.util.BaseOperator;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * This operator parses apache logs one line at a time (each tuple is a log line), using the given regex.&nbsp;
@@ -83,8 +85,7 @@ public class ApacheLogParseMapOutputOperator extends BaseOperator
     {
       try {
         processTuple(s);
-      }
-      catch (ParseException ex) {
+      } catch (ParseException ex) {
         throw new RuntimeException("Could not parse the input string", ex);
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseOperator.java b/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseOperator.java
index 938a927..f1dffe8 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/ApacheLogParseOperator.java
@@ -22,11 +22,11 @@ import java.text.ParseException;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
 
 /**
  * Parse Apache log lines one line at a time.&nbsp;
@@ -58,116 +58,115 @@ import com.datatorrent.api.annotation.Stateless;
  * @since 0.3.3
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class ApacheLogParseOperator extends BaseOperator
 {
   /**
    * This is the input port which receives apache log lines.
    */
-	public final transient DefaultInputPort<String> data = new DefaultInputPort<String>()
-	{
-		@Override
-		public void process(String s)
-		{
-			try {
-				processTuple(s);
-			} catch (ParseException ex) {
-				// ignore
-			}
-		}
-	};
+  public final transient DefaultInputPort<String> data = new DefaultInputPort<String>()
+  {
+    @Override
+    public void process(String s)
+    {
+      try {
+        processTuple(s);
+      } catch (ParseException ex) {
+        // ignore
+      }
+    }
+  };
 
-	/**
-	 * Client IP address, output port.
-	 */
-	public final transient DefaultOutputPort<String> outputIPAddress = new DefaultOutputPort<String>();
+  /**
+   * Client IP address, output port.
+   */
+  public final transient DefaultOutputPort<String> outputIPAddress = new DefaultOutputPort<String>();
 
-	/**
-	 * Access url port, output port.
-	 */
-	public final transient DefaultOutputPort<String> outputUrl = new DefaultOutputPort<String>();
+  /**
+   * Access url port, output port.
+   */
+  public final transient DefaultOutputPort<String> outputUrl = new DefaultOutputPort<String>();
 
-	/**
-	 * Apache status log, output port.
-	 */
-	public final transient DefaultOutputPort<String> outputStatusCode = new DefaultOutputPort<String>();
+  /**
+   * Apache status log, output port.
+   */
+  public final transient DefaultOutputPort<String> outputStatusCode = new DefaultOutputPort<String>();
 
-	/**
-	 * Number of bytes served, output port.
-	 */
-	public final transient DefaultOutputPort<Long> outputBytes = new DefaultOutputPort<Long>();
+  /**
+   * Number of bytes served, output port.
+   */
+  public final transient DefaultOutputPort<Long> outputBytes = new DefaultOutputPort<Long>();
 
-	/**
-	 * Referer name, output port.
-	 */
-	public final transient DefaultOutputPort<String> outputReferer = new DefaultOutputPort<String>();
+  /**
+   * Referer name, output port.
+   */
+  public final transient DefaultOutputPort<String> outputReferer = new DefaultOutputPort<String>();
 
-	/**
-	 * IP Agent, output port.
-	 */
-	public final transient DefaultOutputPort<String> outputAgent = new DefaultOutputPort<String>();
+  /**
+   * IP Agent, output port.
+   */
+  public final transient DefaultOutputPort<String> outputAgent = new DefaultOutputPort<String>();
 
-	/**
-	 * Get apache log pattern regex.
-	 * @return regex string.
-	 */
-	protected static String getAccessLogRegex()
-	{
-		String regex1 = "^([\\d\\.]+)"; // Client IP
-		String regex2 = " (\\S+)"; // -
-		String regex3 = " (\\S+)"; // -
-		String regex4 = " \\[([\\w:/]+\\s[+\\-]\\d{4})\\]"; // Date
-		String regex5 = " \"[A-Z]+ (.+?) HTTP/\\S+\""; // url
-		String regex6 = " (\\d{3})"; // HTTP code
-		String regex7 = " (\\d+)"; // Number of bytes
-		String regex8 = " \"([^\"]+)\""; // Referer
-		String regex9 = " \"([^\"]+)\""; // Agent
-		String regex10 = ".*"; // ignore the rest
-		return regex1 + regex2 + regex3 + regex4 + regex5 + regex6 + regex7
-				+ regex8 + regex9 + regex10;
-	}
+  /**
+   * Get apache log pattern regex.
+   * @return regex string.
+   */
+  protected static String getAccessLogRegex()
+  {
+    String regex1 = "^([\\d\\.]+)"; // Client IP
+    String regex2 = " (\\S+)"; // -
+    String regex3 = " (\\S+)"; // -
+    String regex4 = " \\[([\\w:/]+\\s[+\\-]\\d{4})\\]"; // Date
+    String regex5 = " \"[A-Z]+ (.+?) HTTP/\\S+\""; // url
+    String regex6 = " (\\d{3})"; // HTTP code
+    String regex7 = " (\\d+)"; // Number of bytes
+    String regex8 = " \"([^\"]+)\""; // Referer
+    String regex9 = " \"([^\"]+)\""; // Agent
+    String regex10 = ".*"; // ignore the rest
+    return regex1 + regex2 + regex3 + regex4 + regex5 + regex6 + regex7
+        + regex8 + regex9 + regex10;
+  }
 
-	/**
-	 * Parses Apache combined access log, and prints out the following <br>
-	 * 1. Requester IP <br>
-	 * 2. Date of Request <br>
-	 * 3. Requested Page Path
-	 *
-	 * @param line
-	 *          : tuple to parsee
-	 * @throws ParseException
-	 */
-	public void processTuple(String line) throws ParseException
-	{
-		// Apapche log attaributes on each line.
-		String url;
-		String httpStatusCode;
-		long numOfBytes;
-		String referer;
-		String agent;
-		String ipAddr;
+  /**
+   * Parses Apache combined access log, and prints out the following <br>
+   * 1. Requester IP <br>
+   * 2. Date of Request <br>
+   * 3. Requested Page Path
+   *
+   * @param line
+   *          : tuple to parse
+   * @throws ParseException
+   */
+  public void processTuple(String line) throws ParseException
+  {
+    // Apache log attributes on each line.
+    String url;
+    String httpStatusCode;
+    long numOfBytes;
+    String referer;
+    String agent;
+    String ipAddr;
 
-		// Parse each log line.
-		Pattern accessLogPattern = Pattern.compile(getAccessLogRegex(),
-				Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
-		Matcher accessLogEntryMatcher;
-		accessLogEntryMatcher = accessLogPattern.matcher(line);
+    // Parse each log line.
+    Pattern accessLogPattern = Pattern.compile(getAccessLogRegex(),
+        Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
+    Matcher accessLogEntryMatcher;
+    accessLogEntryMatcher = accessLogPattern.matcher(line);
 
-		if (accessLogEntryMatcher.matches()) {
-			// System.out.println("MATCHED!");
-			ipAddr = accessLogEntryMatcher.group(1);
-			url = accessLogEntryMatcher.group(5);
-			httpStatusCode = accessLogEntryMatcher.group(6);
-			numOfBytes = Long.parseLong(accessLogEntryMatcher.group(7));
-			referer = accessLogEntryMatcher.group(8);
-			agent = accessLogEntryMatcher.group(9);
+    if (accessLogEntryMatcher.matches()) {
+      ipAddr = accessLogEntryMatcher.group(1);
+      url = accessLogEntryMatcher.group(5);
+      httpStatusCode = accessLogEntryMatcher.group(6);
+      numOfBytes = Long.parseLong(accessLogEntryMatcher.group(7));
+      referer = accessLogEntryMatcher.group(8);
+      agent = accessLogEntryMatcher.group(9);
 
-			outputIPAddress.emit(ipAddr);
-			outputUrl.emit(url);
-			outputStatusCode.emit(httpStatusCode);
-			outputBytes.emit(numOfBytes);
-			outputReferer.emit(referer);
-			outputAgent.emit(agent);
-		}
-	}
+      outputIPAddress.emit(ipAddr);
+      outputUrl.emit(url);
+      outputStatusCode.emit(httpStatusCode);
+      outputBytes.emit(numOfBytes);
+      outputReferer.emit(referer);
+      outputAgent.emit(agent);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/ApacheVirtualLogParseOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/ApacheVirtualLogParseOperator.java b/library/src/main/java/com/datatorrent/lib/logs/ApacheVirtualLogParseOperator.java
index 3861639..1ba555e 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/ApacheVirtualLogParseOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/ApacheVirtualLogParseOperator.java
@@ -18,11 +18,6 @@
  */
 package com.datatorrent.lib.logs;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.annotation.Stateless;
-
 import java.io.IOException;
 import java.text.ParseException;
 import java.util.HashMap;
@@ -30,6 +25,11 @@ import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.annotation.Stateless;
+import com.datatorrent.common.util.BaseOperator;
+
 /**
  * Parse Apache log lines one line at a time.&nbsp;
  * Regex (getAccessLogRegex) is used as a parser.&nbsp;
@@ -47,146 +47,154 @@ import java.util.regex.Pattern;
  * @since 0.3.2
  */
 @Stateless
-public class ApacheVirtualLogParseOperator extends BaseOperator {
-
-    // default date format
-    protected static final String dateFormat = "dd/MMM/yyyy:HH:mm:ss Z";
-    /**
-     *
-     */
-    public final transient DefaultInputPort<String> data = new DefaultInputPort<String>() {
-        @Override
-        public void process(String s) {
-            try {
-                processTuple(s);
-            } catch (ParseException ex) {
-                // ignore
-            }
-        }
-    };
-
-    /**
-     * This output port emits the IPAddresses contained in log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputIPAddress = new DefaultOutputPort<String>();
-    /**
-     * This output port emits URLs contained in log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputUrl = new DefaultOutputPort<String>();
-    /**
-     * This output port emits status codes contained in log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputStatusCode = new DefaultOutputPort<String>();
-    /**
-     * This output pot emits a Map for each log file line,
-     * which contains all the information extracted from the log file line.
-     */
-    public final transient DefaultOutputPort<Map<String, Integer>> outputBytes = new DefaultOutputPort<Map<String, Integer>>();
-    /**
-     * This output port emits the referers contained in the log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputReferer = new DefaultOutputPort<String>();
-    /**
-     * This output port emits the agents contained in the log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputAgent = new DefaultOutputPort<String>();
-    /**
-     * This output port emits the servernames contained in the log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputServerName = new DefaultOutputPort<String>();
-    /**
-     * This output port emits the servernames contained in the log file lines.
-     */
-    public final transient DefaultOutputPort<String> outputServerName1 = new DefaultOutputPort<String>();
-    /**
-     * This output port emits the status codes corresponding to each url in a log file line.
-     */
-    public final transient DefaultOutputPort<Map<String, String>> outUrlStatus = new DefaultOutputPort<Map<String, String>>();
-    /**
-     * This output port emits the status associated with each server in a log file line.
-     */
-    public final transient DefaultOutputPort<Map<String, String>> outServerStatus = new DefaultOutputPort<Map<String, String>>();
-    /**
-     * This output port emits client data usage contained in log file lines.
-     */
-    public final transient DefaultOutputPort<Integer> clientDataUsage = new DefaultOutputPort<Integer>();
-    /**
-     * This output port emits the view counts contained in log file lines.
-     */
-    public final transient DefaultOutputPort<Integer> viewCount = new DefaultOutputPort<Integer>();
-
-    protected static String getAccessLogRegex() {
-    	  String regex0 = "^([^\"]+)";
-        String regex1 = " ([\\d\\.]+)";                         // Client IP
-        String regex2 = " (\\S+)";                             // -
-        String regex3 = " (\\S+)";                             // -
-        String regex4 = " \\[([\\w:/]+\\s[+\\-]\\d{4})\\]"; // Date
-        String regex5 = " \"[A-Z]+ (.+?) HTTP/\\S+\"";                       //  url
-        String regex6 = " (\\d{3})";                           // HTTP code
-        String regex7 = " (\\d+)";                     // Number of bytes
-        String regex8 = " \"([^\"]+)\"";                 // Referer
-        String regex9 = " \"([^\"]+)\"";                // Agent
-        String regex10 = ".*"; // ignore the rest
-        return regex0 + regex1 + regex2 + regex3 + regex4 + regex5 + regex6 + regex7 + regex8 + regex9 + regex10;
+public class ApacheVirtualLogParseOperator extends BaseOperator
+{
+
+  // default date format
+  protected static final String dateFormat = "dd/MMM/yyyy:HH:mm:ss Z";
+  /**
+   *
+   */
+  public final transient DefaultInputPort<String> data = new DefaultInputPort<String>()
+  {
+    @Override
+    public void process(String s)
+    {
+      try {
+        processTuple(s);
+      } catch (ParseException ex) {
+        // ignore
+      }
     }
-
-    /**
-     * Parses Apache combined access log, and prints out the following <br>1.
-     * Requester IP <br>2. Date of Request <br>3. Requested Page Path
-     *
-     * @param line : tuple to parsee
-     * @throws ParseException
-     * @throws IOException
-     */
-    public void processTuple(String line) throws ParseException {
-
-    	  // Apache log properties.
-        String url;
-        String httpStatusCode;
-        long numOfBytes;
-        String referer;
-        String agent;
-        String ipAddr;
-        String serverName;
-
-        // Parser log.
-        Pattern accessLogPattern = Pattern.compile(getAccessLogRegex(), Pattern.CASE_INSENSITIVE
-                | Pattern.DOTALL);
-        Matcher accessLogEntryMatcher;
-        accessLogEntryMatcher = accessLogPattern.matcher(line);
-
-        if (accessLogEntryMatcher.matches()) {
-
-        	  serverName = accessLogEntryMatcher.group(1);
-            ipAddr = accessLogEntryMatcher.group(2);
-            url = accessLogEntryMatcher.group(6);
-            httpStatusCode = accessLogEntryMatcher.group(7);
-            numOfBytes = Long.parseLong(accessLogEntryMatcher.group(8));
-            referer = accessLogEntryMatcher.group(9);
-            agent = accessLogEntryMatcher.group(10);
-
-            outputIPAddress.emit(ipAddr);
-            outputUrl.emit(url);
-            outputStatusCode.emit(httpStatusCode);
-            Map<String, Integer> ipdata = new HashMap<String, Integer>();
-            ipdata.put(ipAddr, (int)numOfBytes);
-            outputBytes.emit(ipdata);
-            outputReferer.emit(referer);
-            outputAgent.emit(agent);
-            outputServerName.emit(serverName);
-            outputServerName1.emit(serverName);
-
-            HashMap<String, String> urlStatus = new HashMap<String, String>();
-            urlStatus.put(url, httpStatusCode);
-            outUrlStatus.emit(urlStatus);
-
-            HashMap<String, String> serverStatus = new HashMap<String, String>();
-            serverStatus.put(serverName, httpStatusCode);
-            outServerStatus.emit(serverStatus);
-
-            clientDataUsage.emit((int)numOfBytes);
-            viewCount.emit(new Integer(1));
-        }
+  };
+
+  /**
+   * This output port emits the IPAddresses contained in log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputIPAddress = new DefaultOutputPort<String>();
+  /**
+   * This output port emits URLs contained in log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputUrl = new DefaultOutputPort<String>();
+  /**
+   * This output port emits status codes contained in log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputStatusCode = new DefaultOutputPort<String>();
+  /**
+   * This output port emits a Map for each log file line,
+   * which contains all the information extracted from the log file line.
+   */
+  public final transient DefaultOutputPort<Map<String, Integer>> outputBytes =
+      new DefaultOutputPort<Map<String, Integer>>();
+  /**
+   * This output port emits the referers contained in the log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputReferer = new DefaultOutputPort<String>();
+  /**
+   * This output port emits the agents contained in the log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputAgent = new DefaultOutputPort<String>();
+  /**
+   * This output port emits the servernames contained in the log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputServerName = new DefaultOutputPort<String>();
+  /**
+   * This output port emits the servernames contained in the log file lines.
+   */
+  public final transient DefaultOutputPort<String> outputServerName1 = new DefaultOutputPort<String>();
+  /**
+   * This output port emits the status codes corresponding to each url in a log file line.
+   */
+  public final transient DefaultOutputPort<Map<String, String>> outUrlStatus =
+      new DefaultOutputPort<Map<String, String>>();
+  /**
+   * This output port emits the status associated with each server in a log file line.
+   */
+  public final transient DefaultOutputPort<Map<String, String>> outServerStatus =
+      new DefaultOutputPort<Map<String, String>>();
+  /**
+   * This output port emits client data usage contained in log file lines.
+   */
+  public final transient DefaultOutputPort<Integer> clientDataUsage = new DefaultOutputPort<Integer>();
+  /**
+   * This output port emits the view counts contained in log file lines.
+   */
+  public final transient DefaultOutputPort<Integer> viewCount = new DefaultOutputPort<Integer>();
+
+  protected static String getAccessLogRegex()
+  {
+    String regex0 = "^([^\"]+)";
+    String regex1 = " ([\\d\\.]+)";                         // Client IP
+    String regex2 = " (\\S+)";                             // -
+    String regex3 = " (\\S+)";                             // -
+    String regex4 = " \\[([\\w:/]+\\s[+\\-]\\d{4})\\]"; // Date
+    String regex5 = " \"[A-Z]+ (.+?) HTTP/\\S+\"";                       //  url
+    String regex6 = " (\\d{3})";                           // HTTP code
+    String regex7 = " (\\d+)";                     // Number of bytes
+    String regex8 = " \"([^\"]+)\"";                 // Referer
+    String regex9 = " \"([^\"]+)\"";                // Agent
+    String regex10 = ".*"; // ignore the rest
+    return regex0 + regex1 + regex2 + regex3 + regex4 + regex5 + regex6 + regex7 + regex8 + regex9 + regex10;
+  }
+
+  /**
+   * Parses Apache combined access log, and prints out the following <br>1.
+   * Requester IP <br>2. Date of Request <br>3. Requested Page Path
+   *
+   * @param line : tuple to parse
+   * @throws ParseException
+   * @throws IOException
+   */
+  public void processTuple(String line) throws ParseException
+  {
+
+    // Apache log properties.
+    String url;
+    String httpStatusCode;
+    long numOfBytes;
+    String referer;
+    String agent;
+    String ipAddr;
+    String serverName;
+
+    // Parser log.
+    Pattern accessLogPattern = Pattern.compile(getAccessLogRegex(), Pattern.CASE_INSENSITIVE
+        | Pattern.DOTALL);
+    Matcher accessLogEntryMatcher;
+    accessLogEntryMatcher = accessLogPattern.matcher(line);
+
+    if (accessLogEntryMatcher.matches()) {
+
+      serverName = accessLogEntryMatcher.group(1);
+      ipAddr = accessLogEntryMatcher.group(2);
+      url = accessLogEntryMatcher.group(6);
+      httpStatusCode = accessLogEntryMatcher.group(7);
+      numOfBytes = Long.parseLong(accessLogEntryMatcher.group(8));
+      referer = accessLogEntryMatcher.group(9);
+      agent = accessLogEntryMatcher.group(10);
+
+      outputIPAddress.emit(ipAddr);
+      outputUrl.emit(url);
+      outputStatusCode.emit(httpStatusCode);
+      Map<String, Integer> ipdata = new HashMap<String, Integer>();
+      ipdata.put(ipAddr, (int)numOfBytes);
+      outputBytes.emit(ipdata);
+      outputReferer.emit(referer);
+      outputAgent.emit(agent);
+      outputServerName.emit(serverName);
+      outputServerName1.emit(serverName);
+
+      HashMap<String, String> urlStatus = new HashMap<String, String>();
+      urlStatus.put(url, httpStatusCode);
+      outUrlStatus.emit(urlStatus);
+
+      HashMap<String, String> serverStatus = new HashMap<String, String>();
+      serverStatus.put(serverName, httpStatusCode);
+      outServerStatus.emit(serverStatus);
+
+      clientDataUsage.emit((int)numOfBytes);
+      viewCount.emit(new Integer(1));
     }
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/DimensionAggregationUnifier.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/DimensionAggregationUnifier.java b/library/src/main/java/com/datatorrent/lib/logs/DimensionAggregationUnifier.java
index 2c6ad20..6c56b11 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/DimensionAggregationUnifier.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/DimensionAggregationUnifier.java
@@ -43,8 +43,10 @@ public class DimensionAggregationUnifier implements Operator
 
   private Map<String, Map<String, MutableDouble>> dataMap = new HashMap<String, Map<String, MutableDouble>>();
 
-  public final transient DefaultOutputPort<Map<String, DimensionObject<String>>> output = new DefaultOutputPort<Map<String, DimensionObject<String>>>();
-  public final transient DefaultInputPort<Map<String, DimensionObject<String>>> input = new DefaultInputPort<Map<String, DimensionObject<String>>>() {
+  public final transient DefaultOutputPort<Map<String, DimensionObject<String>>> output = new DefaultOutputPort<>();
+
+  public final transient DefaultInputPort<Map<String, DimensionObject<String>>> input = new DefaultInputPort<Map<String, DimensionObject<String>>>()
+  {
 
     @Override
     public void process(Map<String, DimensionObject<String>> tuple)

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/DimensionObject.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/DimensionObject.java b/library/src/main/java/com/datatorrent/lib/logs/DimensionObject.java
index f6c402a..cba895f 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/DimensionObject.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/DimensionObject.java
@@ -35,7 +35,8 @@ public class DimensionObject<T> implements Comparable<DimensionObject<T>>
   private T val;
 
   @SuppressWarnings("unused")
-  private DimensionObject(){
+  private DimensionObject()
+  {
 
   }
 
@@ -86,12 +87,14 @@ public class DimensionObject<T> implements Comparable<DimensionObject<T>>
   @Override
   public boolean equals(Object obj)
   {
-    if (obj == null)
+    if (obj == null) {
       return false;
-    if (!this.getClass().equals(obj.getClass()))
+    }
+    if (!this.getClass().equals(obj.getClass())) {
       return false;
+    }
     @SuppressWarnings("unchecked")
-    DimensionObject<T> obj2 = (DimensionObject<T>) obj;
+    DimensionObject<T> obj2 = (DimensionObject<T>)obj;
     return this.val.equals(obj2.val);
 
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayList.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayList.java b/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayList.java
index 20dcf43..de0e1bd 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayList.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenArrayList.java
@@ -18,9 +18,10 @@
  */
 package com.datatorrent.lib.logs;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
-import java.util.HashMap;
 
 /**
  * <p>
@@ -53,7 +54,7 @@ import java.util.HashMap;
  * @since 0.3.2
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class FilteredLineToTokenArrayList extends LineToTokenArrayList
 {
   HashMap<String, Object> filterBy = new HashMap<String, Object>();

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMap.java b/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMap.java
index f67b0b7..c79e884 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMap.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMap.java
@@ -18,9 +18,10 @@
  */
 package com.datatorrent.lib.logs;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
-import java.util.HashMap;
 
 /**
  * <p>
@@ -55,35 +56,35 @@ import java.util.HashMap;
  * @since 0.3.3
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class FilteredLineToTokenHashMap extends LineToTokenHashMap
 {
-	HashMap<String, Object> filterBy = new HashMap<String, Object>(4);
+  HashMap<String, Object> filterBy = new HashMap<String, Object>(4);
 
-	/**
-	 * setter function for filterBy
-	 *
-	 * @param list
-	 *          list of keys for subtoken filters
-	 */
-	public void setFilterBy(String[] list)
-	{
-		if (list != null) {
-			for (String s : list) {
-				filterBy.put(s, null);
-			}
-		}
-	}
+  /**
+   * setter function for filterBy
+   *
+   * @param list
+   *          list of keys for subtoken filters
+   */
+  public void setFilterBy(String[] list)
+  {
+    if (list != null) {
+      for (String s : list) {
+        filterBy.put(s, null);
+      }
+    }
+  }
 
-	/**
-	 * If the key is in the filter, returns true
-	 *
-	 * @param subtok
-	 * @return true if super.validToken (!isEmpty()) and filter has they token
-	 */
-	@Override
-	public boolean validSubTokenKey(String subtok)
-	{
-		return super.validToken(subtok) && filterBy.containsKey(subtok);
-	}
+  /**
+   * If the key is in the filter, returns true
+   *
+   * @param subtok
+   * @return true if super.validToken (!isEmpty()) and filter has the token
+   */
+  @Override
+  public boolean validSubTokenKey(String subtok)
+  {
+    return super.validToken(subtok) && filterBy.containsKey(subtok);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyVal.java b/library/src/main/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyVal.java
index 37e66ff..a6acd4c 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/FilteredLineTokenizerKeyVal.java
@@ -18,9 +18,10 @@
  */
 package com.datatorrent.lib.logs;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.api.annotation.Stateless;
-import java.util.HashMap;
 
 /**
  * Splits lines into tokens, and tokens into sub-tokens and emits key,val pairs in a HashMap.&nbsp;
@@ -52,35 +53,35 @@ import java.util.HashMap;
  * @since 0.3.3
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class FilteredLineTokenizerKeyVal extends LineTokenizerKeyVal
 {
-	HashMap<String, Object> filterBy = new HashMap<String, Object>(4);
+  HashMap<String, Object> filterBy = new HashMap<String, Object>(4);
 
-	/**
-	 * setter function for filterBy
-	 *
-	 * @param list
-	 *          list of keys for subtoken filters
-	 */
-	public void setFilterBy(String[] list)
-	{
-		if (list != null) {
-			for (String s : list) {
-				filterBy.put(s, null);
-			}
-		}
-	}
+  /**
+   * setter function for filterBy
+   *
+   * @param list
+   *          list of keys for subtoken filters
+   */
+  public void setFilterBy(String[] list)
+  {
+    if (list != null) {
+      for (String s : list) {
+        filterBy.put(s, null);
+      }
+    }
+  }
 
-	/**
-	 * If the key is in the filter, returns true
-	 *
-	 * @param subtok
-	 * @return true if super.validToken (!isEmpty()) and filter has they token
-	 */
-	@Override
-	public boolean validSubTokenKey(String subtok)
-	{
-		return super.validToken(subtok) && filterBy.containsKey(subtok);
-	}
+  /**
+   * If the key is in the filter, returns true
+   *
+   * @param subtok
+   * @return true if super.validToken (!isEmpty()) and filter has they token
+   */
+  @Override
+  public boolean validSubTokenKey(String subtok)
+  {
+    return super.validToken(subtok) && filterBy.containsKey(subtok);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/LineToTokenArrayList.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/LineToTokenArrayList.java b/library/src/main/java/com/datatorrent/lib/logs/LineToTokenArrayList.java
index ffc211d..182078b 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/LineToTokenArrayList.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/LineToTokenArrayList.java
@@ -56,7 +56,7 @@ import com.datatorrent.lib.util.UnifierArrayList;
  * @since 0.3.2
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class LineToTokenArrayList extends BaseLineTokenizer
 {
   protected transient ArrayList<String> tokentuple = null;
@@ -79,15 +79,16 @@ public class LineToTokenArrayList extends BaseLineTokenizer
   };
 
   /**
-	 * This output port emits a map from tokens to sub tokens.
-	 */
+   * This output port emits a map from tokens to sub tokens.
+   */
   @OutputPortFieldAnnotation(optional = true)
-  public final transient DefaultOutputPort<ArrayList<HashMap<String, ArrayList<String>>>> splittokens = new DefaultOutputPort<ArrayList<HashMap<String, ArrayList<String>>>>()
+  public final transient DefaultOutputPort<ArrayList<HashMap<String, ArrayList<String>>>> splittokens =
+      new DefaultOutputPort<ArrayList<HashMap<String, ArrayList<String>>>>()
   {
     @Override
     public Unifier<ArrayList<HashMap<String, ArrayList<String>>>> getUnifier()
     {
-      return new UnifierArrayList<HashMap<String, ArrayList<String>>>();
+      return new UnifierArrayList<>();
     }
   };
 
@@ -152,8 +153,7 @@ public class LineToTokenArrayList extends BaseLineTokenizer
   {
     if (smap.isEmpty()) {
       smap.put(subtok, vals);
-    }
-    else {
+    } else {
       vals.add(subtok);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/LineToTokenHashMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/LineToTokenHashMap.java b/library/src/main/java/com/datatorrent/lib/logs/LineToTokenHashMap.java
index d0a67f3..0060c74 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/LineToTokenHashMap.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/LineToTokenHashMap.java
@@ -54,7 +54,7 @@ import com.datatorrent.lib.util.UnifierHashMap;
  * @since 0.3.2
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class LineToTokenHashMap extends BaseLineTokenizer
 {
   /**
@@ -103,8 +103,7 @@ public class LineToTokenHashMap extends BaseLineTokenizer
     if (vals == null) {
       tok = subtok;
       vals = new ArrayList<String>();
-    }
-    else {
+    } else {
       vals.add(subtok);
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/LineTokenizer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/LineTokenizer.java b/library/src/main/java/com/datatorrent/lib/logs/LineTokenizer.java
index 1ddd40c..814d132 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/LineTokenizer.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/LineTokenizer.java
@@ -19,7 +19,8 @@
 package com.datatorrent.lib.logs;
 
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.annotation.*;
+import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.util.BaseLineTokenizer;
 
 /**
@@ -47,25 +48,25 @@ import com.datatorrent.lib.util.BaseLineTokenizer;
  * @since 0.3.3
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class LineTokenizer extends BaseLineTokenizer
 {
   /**
    * The is the output port that emits string tokens.
    */
-	public final transient DefaultOutputPort<String> tokens = new DefaultOutputPort<String>();
+  public final transient DefaultOutputPort<String> tokens = new DefaultOutputPort<String>();
 
-	/**
-	 * emits tokens on port "tokens" if tok is not empty
-	 *
-	 * @param tok
-	 */
-	@Override
-	public void processToken(String tok)
-	{
-		if (!tok.isEmpty()) {
-			tokens.emit(tok);
-		}
-	}
+  /**
+   * emits tokens on port "tokens" if tok is not empty
+   *
+   * @param tok
+   */
+  @Override
+  public void processToken(String tok)
+  {
+    if (!tok.isEmpty()) {
+      tokens.emit(tok);
+    }
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/logs/LineTokenizerKeyVal.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/logs/LineTokenizerKeyVal.java b/library/src/main/java/com/datatorrent/lib/logs/LineTokenizerKeyVal.java
index 2a568d1..0d89996 100644
--- a/library/src/main/java/com/datatorrent/lib/logs/LineTokenizerKeyVal.java
+++ b/library/src/main/java/com/datatorrent/lib/logs/LineTokenizerKeyVal.java
@@ -18,11 +18,13 @@
  */
 package com.datatorrent.lib.logs;
 
+import java.util.HashMap;
+
 import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.annotation.*;
+import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.lib.util.BaseLineTokenizer;
 import com.datatorrent.lib.util.UnifierHashMap;
-import java.util.HashMap;
 
 /**
  * This operator splits lines into tokens, and tokens into sub-tokens.&nbsp;
@@ -44,7 +46,8 @@ import java.util.HashMap;
  * <br>
  * <b>Properties</b>:<br>
  * <b>splitby</b>: The characters used to split the line. Default is ";\t "<br>
- * <b>splittokenby</b>: The characters used to split a token into key,val pair. Default is "", i.e. tokens are not split, and key is set to token, and val is null<br>
+ * <b>splittokenby</b>: The characters used to split a token into key,val pair. Default is "", i.e. tokens are not
+ * split, and key is set to token, and val is null<br>
  * <br>
  * </p>
  * @displayName Line Tokenizer Key Value
@@ -54,7 +57,7 @@ import java.util.HashMap;
  * @since 0.3.2
  */
 @Stateless
-@OperatorAnnotation(partitionable=true)
+@OperatorAnnotation(partitionable = true)
 public class LineTokenizerKeyVal extends BaseLineTokenizer
 {
   /**
@@ -129,8 +132,7 @@ public class LineTokenizerKeyVal extends BaseLineTokenizer
   {
     if (skey.isEmpty()) {
       skey = subtok;
-    }
-    else if (sval.isEmpty()) {
+    } else if (sval.isEmpty()) {
       sval = subtok;
     }
   }


[03/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java b/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
index 59b2c4d..10e0f5e 100644
--- a/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
+++ b/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
@@ -23,14 +23,16 @@ import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.lang.ObjectUtils.Null;
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
+import org.apache.commons.lang.ObjectUtils.Null;
+
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
 
+import com.datatorrent.lib.testbench.CollectorTestSink;
+
 /**
  * A unit test to test SortedMovingWindow operator can either:
  * 1. sort simple comparable tuples
@@ -45,7 +47,8 @@ public class SortedMovingWindowTest
    * Test sorting simple comparable tuples within the sliding window
    */
   @Test
-  public void testSortingSimpleNumberTuple(){
+  public void testSortingSimpleNumberTuple()
+  {
     SortedMovingWindow<Integer, Null> smw = new SortedMovingWindow<Integer, Null>();
     CollectorTestSink<Object> testSink = new CollectorTestSink<Object>();
     smw.outputPort.setSink(testSink);
@@ -73,9 +76,10 @@ public class SortedMovingWindowTest
 
     SortedMovingWindow<Map<String, Integer>, Null> smw = new SortedMovingWindow<Map<String, Integer>, Null>();
 
-    final String[] keys = { "number" };
+    final String[] keys = {"number"};
 
-    smw.setComparator(new Comparator<Map<String, Integer>>() {
+    smw.setComparator(new Comparator<Map<String, Integer>>()
+    {
       @Override
       public int compare(Map<String, Integer> o1, Map<String, Integer> o2)
       {
@@ -89,15 +93,17 @@ public class SortedMovingWindowTest
     smw.setWindowSize(2);
 
     // The incoming 6 simple map tuples are disordered among 4 windows 
-    emitObjects(smw, new Map[][] { createHashMapTuples(keys, new Integer[][] { { 1 }, { 3 } }), createHashMapTuples(keys, new Integer[][] { { 2 }, { 5 } }), 
-        createHashMapTuples(keys, new Integer[][] { { 4 } }), createHashMapTuples(keys, new Integer[][] { { 6 } }) });
+    emitObjects(smw, new Map[][]{createHashMapTuples(keys, new Integer[][]{{1}, {3}}),
+        createHashMapTuples(keys, new Integer[][]{{2}, {5}}),
+        createHashMapTuples(keys, new Integer[][]{{4}}), createHashMapTuples(keys, new Integer[][]{{6}})});
     smw.beginWindow(4);
     smw.endWindow();
     smw.beginWindow(5);
     smw.endWindow();
 
     // The outcome is ordered by the value of the key "number"
-    Assert.assertEquals(Arrays.asList(createHashMapTuples(keys, new Integer[][] { { 1 }, { 2 }, { 3 }, { 4 }, { 5 }, { 6 } })), testSink.collectedTuples);
+    Assert.assertEquals(Arrays.asList(createHashMapTuples(keys, new Integer[][]{{1}, {2}, {3}, {4}, {5}, {6}})),
+        testSink.collectedTuples);
   }
   
   
@@ -110,9 +116,10 @@ public class SortedMovingWindowTest
 
     SortedMovingWindow<Map<String, Object>, String> smw = new SortedMovingWindow<Map<String, Object>, String>();
 
-    final String[] keys = { "name", "number" };
+    final String[] keys = {"name", "number"};
 
-    smw.setComparator(new Comparator<Map<String, Object>>() {
+    smw.setComparator(new Comparator<Map<String, Object>>()
+    {
       @Override
       public int compare(Map<String, Object> o1, Map<String, Object> o2)
       {
@@ -121,12 +128,13 @@ public class SortedMovingWindowTest
       }
     });
     
-    smw.setFunction(new Function<Map<String,Object>, String>() {
+    smw.setFunction(new Function<Map<String,Object>, String>()
+    {
       @Override
       public String apply(Map<String, Object> input)
       {
         // order tuple with same key "name"
-        return (String) input.get(keys[0]);
+        return (String)input.get(keys[0]);
       }
     });
     CollectorTestSink<Object> testSink = new CollectorTestSink<Object>();
@@ -135,23 +143,23 @@ public class SortedMovingWindowTest
     smw.setWindowSize(2);
 
     // The incoming 9 complex map tuples are disordered with same name among 4 windows 
-    emitObjects(smw, new Map[][] { createHashMapTuples(keys, new Object[][] { {"bob", 1 }, {"jim", 1 } }), createHashMapTuples(keys, new Object[][] { {"jim", 2 }, { "bob", 3 } }), 
-        createHashMapTuples(keys, new Object[][] { { "bob", 2 }, { "jim", 4} }), createHashMapTuples(keys, new Object[][] { {"bob", 5}, {"jim", 3 }, {"bob", 4} }) });
+    emitObjects(smw, new Map[][]{createHashMapTuples(keys, new Object[][]{{"bob", 1}, {"jim", 1}}),
+        createHashMapTuples(keys, new Object[][]{{"jim", 2}, {"bob", 3}}),
+        createHashMapTuples(keys, new Object[][]{{"bob", 2}, {"jim", 4}}),
+        createHashMapTuples(keys, new Object[][]{{"bob", 5}, {"jim", 3}, {"bob", 4}})});
     smw.beginWindow(4);
     smw.endWindow();
     smw.beginWindow(5);
     smw.endWindow();
 
     // All tuples with same "name" are sorted by key "number"
-    Assert.assertEquals(Arrays.asList(createHashMapTuples(keys, new Object[][] { { "bob", 1 }, { "jim", 1 }, { "jim", 2 }, { "bob", 2 },
-        { "bob", 3 }, { "jim", 3 }, { "jim", 4 }, { "bob", 4 }, { "bob", 5 } })), testSink.collectedTuples);
+    Assert.assertEquals(Arrays.asList(createHashMapTuples(keys,
+        new Object[][]{{"bob", 1}, {"jim", 1}, {"jim", 2}, {"bob", 2}, {"bob", 3}, {"jim", 3}, {"jim", 4}, {"bob", 4}, {"bob", 5}})), testSink.collectedTuples);
   }
   
-  
-  
-  
-  @SuppressWarnings({ "rawtypes", "unchecked" })
-  private void emitObjects(SortedMovingWindow win, Object[][] obj){
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  private void emitObjects(SortedMovingWindow win, Object[][] obj)
+  {
     for (int i = 0; i < obj.length; i++) {
       win.beginWindow(i);
       for (int j = 0; j < obj[i].length; j++) {
@@ -161,8 +169,9 @@ public class SortedMovingWindowTest
     }
   }
   
-  @SuppressWarnings({ "rawtypes", "unchecked" })
-  private Map[] createHashMapTuples(String[] cols, Object[][] values){
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  private Map[] createHashMapTuples(String[] cols, Object[][] values)
+  {
     
     HashMap[] maps = new HashMap[values.length];
     int index = -1;

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/partitioner/StatelessPartitionerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/partitioner/StatelessPartitionerTest.java b/library/src/test/java/com/datatorrent/lib/partitioner/StatelessPartitionerTest.java
index 3a6f7fe..8d52f7e 100644
--- a/library/src/test/java/com/datatorrent/lib/partitioner/StatelessPartitionerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/partitioner/StatelessPartitionerTest.java
@@ -21,20 +21,21 @@ package com.datatorrent.lib.partitioner;
 import java.util.Collection;
 import java.util.List;
 
-import com.google.common.collect.Lists;
-
 import org.junit.Assert;
 import org.junit.Test;
 
-import com.datatorrent.api.*;
+import com.google.common.collect.Lists;
+
 import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.DefaultPartition;
+import com.datatorrent.api.Operator;
 import com.datatorrent.api.Operator.InputPort;
+import com.datatorrent.api.Partitioner;
 import com.datatorrent.api.Partitioner.Partition;
 import com.datatorrent.api.StringCodec.Object2String;
-
-import com.datatorrent.lib.util.TestUtils;
-
 import com.datatorrent.common.partitioner.StatelessPartitioner;
+import com.datatorrent.lib.util.TestUtils;
 
 public class StatelessPartitionerTest
 {
@@ -101,7 +102,7 @@ public class StatelessPartitionerTest
     Collection<Partition<DummyOperator>> newPartitions = statelessPartitioner.definePartitions(partitions, new PartitioningContextImpl(null, 0));
     Assert.assertEquals("Incorrect number of partitions", 1, newPartitions.size());
 
-    for(Partition<DummyOperator> partition: newPartitions) {
+    for (Partition<DummyOperator> partition : newPartitions) {
       Assert.assertEquals("Incorrect cloned value", 5, partition.getPartitionedInstance().getValue());
     }
   }
@@ -119,7 +120,7 @@ public class StatelessPartitionerTest
     Collection<Partition<DummyOperator>> newPartitions = statelessPartitioner.definePartitions(partitions, new PartitioningContextImpl(null, 0));
     Assert.assertEquals("Incorrect number of partitions", 5, newPartitions.size());
 
-    for(Partition<DummyOperator> partition: newPartitions) {
+    for (Partition<DummyOperator> partition : newPartitions) {
       Assert.assertEquals("Incorrect cloned value", 5, partition.getPartitionedInstance().getValue());
     }
   }
@@ -149,7 +150,7 @@ public class StatelessPartitionerTest
     partitions.add(mockPartition);
 
     Collection<Partition<DummyOperator>> newPartitions = statelessPartitioner.definePartitions(partitions,
-      new PartitioningContextImpl(null, 5));
+        new PartitioningContextImpl(null, 5));
     Assert.assertEquals("after partition", 5, newPartitions.size());
   }
 
@@ -172,7 +173,7 @@ public class StatelessPartitionerTest
     }
 
     Collection<Partition<DummyOperator>> newPartitions = statelessPartitioner.definePartitions(partitions,
-      new PartitioningContextImpl(null, 1));
+        new PartitioningContextImpl(null, 1));
     Assert.assertEquals("after partition", 1, newPartitions.size());
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/partitioner/StatelessThroughputBasedPartitionerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/partitioner/StatelessThroughputBasedPartitionerTest.java b/library/src/test/java/com/datatorrent/lib/partitioner/StatelessThroughputBasedPartitionerTest.java
index 16647ab..6eaebc3 100644
--- a/library/src/test/java/com/datatorrent/lib/partitioner/StatelessThroughputBasedPartitionerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/partitioner/StatelessThroughputBasedPartitionerTest.java
@@ -18,7 +18,6 @@
  */
 package com.datatorrent.lib.partitioner;
 
-import com.datatorrent.api.*;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -29,6 +28,13 @@ import org.junit.Test;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import com.datatorrent.api.Context;
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.DefaultOutputPort;
+import com.datatorrent.api.DefaultPartition;
+import com.datatorrent.api.Operator;
+import com.datatorrent.api.Partitioner;
+import com.datatorrent.api.StatsListener;
 import com.datatorrent.lib.util.TestUtils;
 
 /**
@@ -150,8 +156,8 @@ public class StatelessThroughputBasedPartitionerTest
     partitions.clear();
     partitions.add(mockPartition);
 
-    Collection<Partitioner.Partition<DummyOperator>> newPartitions = statelessLatencyBasedPartitioner.definePartitions(partitions,
-                                                                                                                                                            new StatelessPartitionerTest.PartitioningContextImpl(ports, 5));
+    Collection<Partitioner.Partition<DummyOperator>> newPartitions = statelessLatencyBasedPartitioner.definePartitions(
+        partitions, new StatelessPartitionerTest.PartitioningContextImpl(ports, 5));
     Assert.assertEquals("after partition", 2, newPartitions.size());
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/script/JavaScriptOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/script/JavaScriptOperatorTest.java b/library/src/test/java/com/datatorrent/lib/script/JavaScriptOperatorTest.java
index f3f69fb..3ad30df 100644
--- a/library/src/test/java/com/datatorrent/lib/script/JavaScriptOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/script/JavaScriptOperatorTest.java
@@ -56,7 +56,7 @@ public class JavaScriptOperatorTest
     // Validate value.
     Assert.assertEquals("number emitted tuples", 1, sink.collectedTuples.size());
     for (Object o : sink.collectedTuples) { // count is 12
-      Assert.assertEquals("4.0 is expected", (Double) o, 4.0, 0);
+      Assert.assertEquals("4.0 is expected", (Double)o, 4.0, 0);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
index 1cf89ff..47fa2c2 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
@@ -18,12 +18,12 @@
  */
 package com.datatorrent.lib.statistics;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
-import com.datatorrent.lib.util.TestUtils;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.datatorrent.lib.testbench.CollectorTestSink;
+import com.datatorrent.lib.util.TestUtils;
+
 /**
  * Functional Test for {@link com.datatorrent.lib.statistics.WeightedMeanOperator}. <br>
  */

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
index 158fed5..26e94c6 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
@@ -18,12 +18,12 @@
  */
 package com.datatorrent.lib.statistics;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
-import com.datatorrent.lib.util.TestUtils;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.datatorrent.lib.testbench.CollectorTestSink;
+import com.datatorrent.lib.util.TestUtils;
+
 /**
  * Functional Test for {@link com.datatorrent.lib.statistics.WeightedMeanOperator}. <br>
  */

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
index e3788c2..f9589db 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
@@ -18,12 +18,12 @@
  */
 package com.datatorrent.lib.statistics;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
-import com.datatorrent.lib.util.TestUtils;
-
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.datatorrent.lib.testbench.CollectorTestSink;
+import com.datatorrent.lib.util.TestUtils;
+
 /**
  * Functional Test for {@link com.datatorrent.lib.statistics.WeightedMeanOperator}. <br>
  */

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/ArrayListAggregatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/ArrayListAggregatorTest.java b/library/src/test/java/com/datatorrent/lib/stream/ArrayListAggregatorTest.java
index 76983f5..13d6097 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/ArrayListAggregatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/ArrayListAggregatorTest.java
@@ -30,36 +30,35 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class ArrayListAggregatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
+  @SuppressWarnings({ "rawtypes", "unchecked" })
   @Test
-	public void testNodeProcessing() throws Exception
-	{
-		ArrayListAggregator<Integer> oper = new ArrayListAggregator<Integer>();
-		CollectorTestSink cSink = new CollectorTestSink();
+  public void testNodeProcessing() throws Exception
+  {
+    ArrayListAggregator<Integer> oper = new ArrayListAggregator<Integer>();
+    CollectorTestSink cSink = new CollectorTestSink();
 
-		oper.output.setSink(cSink);
-		oper.setSize(10);
-		int numtuples = 100;
+    oper.output.setSink(cSink);
+    oper.setSize(10);
+    int numtuples = 100;
 
-		oper.beginWindow(0);
-		for (int i = 0; i < numtuples; i++) {
-			oper.input.process(i);
-		}
-		oper.endWindow();
-		Assert.assertEquals("number emitted tuples", 10,
-				cSink.collectedTuples.size());
+    oper.beginWindow(0);
+    for (int i = 0; i < numtuples; i++) {
+      oper.input.process(i);
+    }
+    oper.endWindow();
+    Assert.assertEquals("number emitted tuples", 10,
+        cSink.collectedTuples.size());
 
-		cSink.clear();
-		oper.setSize(0);
+    cSink.clear();
+    oper.setSize(0);
 
-		oper.beginWindow(1);
-		for (int i = 0; i < numtuples; i++) {
-			oper.input.process(i);
-		}
-		oper.endWindow();
-		Assert.assertEquals("number emitted tuples", 1,
-				cSink.collectedTuples.size());
-		ArrayList<?> list = (ArrayList<?>) cSink.collectedTuples.get(0);
-		Assert.assertEquals("number emitted tuples", numtuples, list.size());
-	}
+    oper.beginWindow(1);
+    for (int i = 0; i < numtuples; i++) {
+      oper.input.process(i);
+    }
+    oper.endWindow();
+    Assert.assertEquals("number emitted tuples", 1, cSink.collectedTuples.size());
+    ArrayList<?> list = (ArrayList<?>)cSink.collectedTuples.get(0);
+    Assert.assertEquals("number emitted tuples", numtuples, list.size());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/ArrayListToItemTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/ArrayListToItemTest.java b/library/src/test/java/com/datatorrent/lib/stream/ArrayListToItemTest.java
index fe0edc8..6f1504d 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/ArrayListToItemTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/ArrayListToItemTest.java
@@ -30,29 +30,29 @@ import com.datatorrent.lib.testbench.CountTestSink;
  * Benchmarks: Currently does about ?? Million tuples/sec in debugging environment. Need to test on larger nodes<br>
  * <br>
  */
-public class ArrayListToItemTest {
-	
-    /**
-     * Test operator pass through. The Object passed is not relevant
-     */
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-		@Test
-    public void testNodeProcessing() throws Exception
-    {
-      ArrayListToItem oper = new ArrayListToItem();
-      CountTestSink itemSink = new CountTestSink();
-      oper.item.setSink(itemSink);
+public class ArrayListToItemTest
+{
+  /**
+   * Test operator pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    ArrayListToItem oper = new ArrayListToItem();
+    CountTestSink itemSink = new CountTestSink();
+    oper.item.setSink(itemSink);
 
-      oper.beginWindow(0);
-      ArrayList<String> input = new ArrayList<String>();
-      input.add("a");
-      // Same input object can be used as the oper is just pass through
-      int numtuples = 1000;
-      for (int i = 0; i < numtuples; i++) {
-        oper.data.process(input);
-      }
-
-      oper.endWindow();
-      Assert.assertEquals("number emitted tuples", numtuples, itemSink.count);
+    oper.beginWindow(0);
+    ArrayList<String> input = new ArrayList<String>();
+    input.add("a");
+    // Same input object can be used as the oper is just pass through
+    int numtuples = 1000;
+    for (int i = 0; i < numtuples; i++) {
+      oper.data.process(input);
     }
+
+    oper.endWindow();
+    Assert.assertEquals("number emitted tuples", numtuples, itemSink.count);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/ConsolidatorKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/ConsolidatorKeyValTest.java b/library/src/test/java/com/datatorrent/lib/stream/ConsolidatorKeyValTest.java
index 697ae34..de419fd 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/ConsolidatorKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/ConsolidatorKeyValTest.java
@@ -32,25 +32,25 @@ import com.datatorrent.lib.util.KeyValPair;
  */
 public class ConsolidatorKeyValTest
 {
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		ConsolidatorKeyVal<String, Integer, Double, Integer, Integer, Integer> oper =
-				new ConsolidatorKeyVal<String, Integer, Double, Integer, Integer, Integer>();
-		CollectorTestSink cSink = new CollectorTestSink();
-		oper.out.setSink(cSink);
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    ConsolidatorKeyVal<String, Integer, Double, Integer, Integer, Integer> oper =
+        new ConsolidatorKeyVal<String, Integer, Double, Integer, Integer, Integer>();
+    CollectorTestSink cSink = new CollectorTestSink();
+    oper.out.setSink(cSink);
 
-		oper.beginWindow(0);
-		KeyValPair<String, Integer> m1 = new KeyValPair<String, Integer>("a",1);
-		oper.in1.process(m1);
-		KeyValPair<String, Double> m2 = new KeyValPair<String, Double>("a",1.0);
-		oper.in2.process(m2);
-		oper.endWindow();
-		Assert.assertEquals("number emitted tuples", 1, cSink.collectedTuples.size());
+    oper.beginWindow(0);
+    KeyValPair<String, Integer> m1 = new KeyValPair<String, Integer>("a",1);
+    oper.in1.process(m1);
+    KeyValPair<String, Double> m2 = new KeyValPair<String, Double>("a",1.0);
+    oper.in2.process(m2);
+    oper.endWindow();
+    Assert.assertEquals("number emitted tuples", 1, cSink.collectedTuples.size());
 
 
-		HashMap<String, ArrayList<Object>> map = (HashMap<String, ArrayList<Object>>) cSink.collectedTuples.get(0);
-		Assert.assertEquals("size of sink map", 1, map.size());
-	}
+    HashMap<String, ArrayList<Object>> map = (HashMap<String, ArrayList<Object>>)cSink.collectedTuples.get(0);
+    Assert.assertEquals("size of sink map", 1, map.size());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/CounterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/CounterTest.java b/library/src/test/java/com/datatorrent/lib/stream/CounterTest.java
index c9837cf..dc73a75 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/CounterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/CounterTest.java
@@ -27,33 +27,34 @@ import com.datatorrent.lib.testbench.CountTestSink;
  * Functional test for {@link com.datatorrent.lib.stream.Counter}<p>
  * <br>
  */
-public class CounterTest {
-
-    /**
-     * Test oper pass through. The Object passed is not relevant
-     */
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-    @Test
-    public void testNodeProcessing() throws Exception
-    {
-      Counter oper = new Counter();
-      CountTestSink cSink = new CountTestSink();
-
-      oper.output.setSink(cSink);
-      int numtuples = 100;
-
-      oper.beginWindow(0);
-      for (int i = 0; i < numtuples; i++) {
-        oper.input.process(i);
-      }
-      oper.endWindow();
-
-      oper.beginWindow(1);
-      for (int i = 0; i < numtuples; i++) {
-        oper.input.process(i);
-      }
-      oper.endWindow();
-
-      Assert.assertEquals("number emitted tuples", 2, cSink.getCount());
+public class CounterTest
+{
+
+  /**
+   * Test oper pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    Counter oper = new Counter();
+    CountTestSink cSink = new CountTestSink();
+
+    oper.output.setSink(cSink);
+    int numtuples = 100;
+
+    oper.beginWindow(0);
+    for (int i = 0; i < numtuples; i++) {
+      oper.input.process(i);
     }
+    oper.endWindow();
+
+    oper.beginWindow(1);
+    for (int i = 0; i < numtuples; i++) {
+      oper.input.process(i);
+    }
+    oper.endWindow();
+
+    Assert.assertEquals("number emitted tuples", 2, cSink.getCount());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java b/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
index 0cb5f42..6266787 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
@@ -18,13 +18,12 @@
  */
 package com.datatorrent.lib.stream;
 
-import com.datatorrent.lib.stream.DevNullCounter;
-import com.datatorrent.lib.testbench.EventGenerator;
-
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.lib.testbench.EventGenerator;
+
 /**
  * 
  * Functional tests for {@link com.datatorrent.lib.testbench.DevNullCounter}.
@@ -41,29 +40,26 @@ import org.slf4j.LoggerFactory;
 public class DevNullCounterTest
 {
 
-	private static Logger LOG = LoggerFactory.getLogger(EventGenerator.class);
+  private static Logger LOG = LoggerFactory.getLogger(EventGenerator.class);
 
-	/**
-	 * Tests both string and non string schema
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testSingleSchemaNodeProcessing() throws Exception
-	{
-		DevNullCounter oper = new DevNullCounter();
-		oper.setRollingwindowcount(5);
-		oper.setup(null);
+  /**
+   * Tests both string and non string schema
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testSingleSchemaNodeProcessing() throws Exception
+  {
+    DevNullCounter oper = new DevNullCounter();
+    oper.setRollingwindowcount(5);
+    oper.setup(null);
 
-		oper.beginWindow(0);
-		long numtuples = 1000000;
-		Object o = new Object();
-		for (long i = 0; i < numtuples; i++) {
-			oper.data.process(o);
-		}
-		oper.endWindow();
-		LOG.info(String
-				.format(
-						"\n*******************************************************\nnumtuples(%d)",
-						numtuples));
-	}
+    oper.beginWindow(0);
+    long numtuples = 1000000;
+    Object o = new Object();
+    for (long i = 0; i < numtuples; i++) {
+      oper.data.process(o);
+    }
+    oper.endWindow();
+    LOG.info(String.format("\n*******************************************************\nnumtuples(%d)", numtuples));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java b/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
index 4695d69..3bd9c11 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
@@ -18,26 +18,25 @@
  */
 package com.datatorrent.lib.stream;
 
-import com.datatorrent.lib.stream.DevNull;
-import com.datatorrent.lib.testbench.EventGenerator;
-
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.lib.testbench.EventGenerator;
+
 /**
  * Functional tests for {@link com.datatorrent.lib.testbench.DevNull}. 
  */
-public class DevNullTest {
-
-    private static Logger LOG = LoggerFactory.getLogger(EventGenerator.class);
+public class DevNullTest
+{
 
+  private static Logger LOG = LoggerFactory.getLogger(EventGenerator.class);
 
   /**
    * Tests both string and non string schema
    */
-  @SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
   public void testSingleSchemaNodeProcessing() throws Exception
   {
     DevNull oper = new DevNull();
@@ -49,6 +48,6 @@ public class DevNullTest {
       oper.data.process(o);
     }
     oper.endWindow();
-    LOG.info(String.format("\n*******************************************************\nnumtuples(%d)",  numtuples));
+    LOG.info(String.format("\n*******************************************************\nnumtuples(%d)", numtuples));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/HashMapToKeyValPairTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/HashMapToKeyValPairTest.java b/library/src/test/java/com/datatorrent/lib/stream/HashMapToKeyValPairTest.java
index 99305e3..314eb01 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/HashMapToKeyValPairTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/HashMapToKeyValPairTest.java
@@ -31,35 +31,35 @@ import com.datatorrent.lib.testbench.CountTestSink;
 public class HashMapToKeyValPairTest
 {
 
-	/**
-	 * Test oper pass through. The Object passed is not relevant
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		HashMapToKeyValPair oper = new HashMapToKeyValPair();
-		CountTestSink keySink = new CountTestSink();
-		CountTestSink valSink = new CountTestSink();
-		CountTestSink keyvalSink = new CountTestSink();
+  /**
+   * Test oper pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    HashMapToKeyValPair oper = new HashMapToKeyValPair();
+    CountTestSink keySink = new CountTestSink();
+    CountTestSink valSink = new CountTestSink();
+    CountTestSink keyvalSink = new CountTestSink();
 
-		oper.key.setSink(keySink);
-		oper.val.setSink(valSink);
-		oper.keyval.setSink(keyvalSink);
+    oper.key.setSink(keySink);
+    oper.val.setSink(valSink);
+    oper.keyval.setSink(keyvalSink);
 
-		oper.beginWindow(0);
-		HashMap<String, String> input = new HashMap<String, String>();
-		input.put("a", "1");
-		// Same input object can be used as the oper is just pass through
-		int numtuples = 1000;
-		for (int i = 0; i < numtuples; i++) {
-			oper.data.process(input);
-		}
+    oper.beginWindow(0);
+    HashMap<String, String> input = new HashMap<String, String>();
+    input.put("a", "1");
+    // Same input object can be used as the oper is just pass through
+    int numtuples = 1000;
+    for (int i = 0; i < numtuples; i++) {
+      oper.data.process(input);
+    }
 
-		oper.endWindow();
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", numtuples, keySink.count);
-		Assert.assertEquals("number emitted tuples", numtuples, valSink.count);
-		Assert.assertEquals("number emitted tuples", numtuples, keyvalSink.count);
-	}
+    Assert.assertEquals("number emitted tuples", numtuples, keySink.count);
+    Assert.assertEquals("number emitted tuples", numtuples, valSink.count);
+    Assert.assertEquals("number emitted tuples", numtuples, keyvalSink.count);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/JsonByteArrayOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/JsonByteArrayOperatorTest.java b/library/src/test/java/com/datatorrent/lib/stream/JsonByteArrayOperatorTest.java
index a46eee7..e376c95 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/JsonByteArrayOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/JsonByteArrayOperatorTest.java
@@ -32,76 +32,75 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class JsonByteArrayOperatorTest
 {
-   /**
-     * Test json byte array to HashMap operator pass through. The Object passed is not relevant
-     */
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-		@Test
-    public void testOperator() throws Exception
-    {
-      JsonByteArrayOperator oper = new JsonByteArrayOperator();
-      oper.setConcatenationCharacter('.');
-
-      CollectorTestSink mapSink = new CollectorTestSink();
-      CollectorTestSink jsonObjectSink = new CollectorTestSink();
-      CollectorTestSink flatMapSink = new CollectorTestSink();
-
-      oper.outputMap.setSink(mapSink);
-      oper.outputJsonObject.setSink(jsonObjectSink);
-      oper.outputFlatMap.setSink(flatMapSink);
-
-      oper.beginWindow(0);
-
-      // input test json string
-      String inputJson  = " {   \"@timestamp\":\"2013-09-25T19:37:23.569Z\""
-                        + "      ,\"@version\":\"1\""
-                        + "          ,\"type\":\"apache-logs\""
-                        + "          ,\"host\":\"node1001\""
-                        + "      ,\"clientip\":192.168.150.120"
-                        + "          ,\"verb\":\"GET\""
-                        + "       ,\"request\":\"/reset.css\""
-                        + "   ,\"httpversion\":\"1.1\""
-                        + "      ,\"response\":200"
-                        + "     ,\"agentinfo\": {\"browser\":Firefox"
-                        + "                          ,\"os\": {    \"name\":\"Ubuntu\""
-                        + "                                    ,\"version\":\"10.04\""
-                        + "                                   }"
-                        + "                     }"
-                        + "         ,\"bytes\":909.1"
-                        + " }";
-
-      byte[] inputByteArray = inputJson.getBytes();
-
-      // run the operator for the same string 1000 times
-      int numtuples = 1000;
-      for (int i = 0; i < numtuples; i++) {
-        oper.input.process(inputByteArray);
-      }
-
-      oper.endWindow();
-
-      // assert that the number of the operator generates is 1000
-      Assert.assertEquals("number emitted tuples", numtuples, mapSink.collectedTuples.size());
-      Assert.assertEquals("number emitted tuples", numtuples, jsonObjectSink.collectedTuples.size());
-      Assert.assertEquals("number emitted tuples", numtuples, flatMapSink.collectedTuples.size());
-
-      // assert that value for one of the keys in any one of the objects from mapSink is as expected
-      Object map = mapSink.collectedTuples.get(510);
-      String expectedClientip = "192.168.150.120";
-      Assert.assertEquals("emitted tuple", expectedClientip, ((Map)map).get("clientip"));
-
-
-      // assert that value for one of the keys in any one of the objects from jsonObjectSink is as expected
-      Object jsonObject = jsonObjectSink.collectedTuples.get(433);
-      Number expectedResponse = 200;
-      Assert.assertEquals("emitted tuple", expectedResponse, ((JSONObject)jsonObject).get("response"));
-
-      // assert that value for one of the keys in any one of the objects from flatMapSink is as expected
-      Map flatMap = (Map)flatMapSink.collectedTuples.get(511);
-      String expectedBrowser = "Firefox";
-      String expectedOsName = "Ubuntu";
-      Assert.assertEquals("emitted tuple", expectedBrowser, flatMap.get("agentinfo.browser"));
-      Assert.assertEquals("emitted tuple", expectedOsName, flatMap.get("agentinfo.os.name"));
+  /**
+   * Test json byte array to HashMap operator pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testOperator() throws Exception
+  {
+    JsonByteArrayOperator oper = new JsonByteArrayOperator();
+    oper.setConcatenationCharacter('.');
+
+    CollectorTestSink mapSink = new CollectorTestSink();
+    CollectorTestSink jsonObjectSink = new CollectorTestSink();
+    CollectorTestSink flatMapSink = new CollectorTestSink();
+
+    oper.outputMap.setSink(mapSink);
+    oper.outputJsonObject.setSink(jsonObjectSink);
+    oper.outputFlatMap.setSink(flatMapSink);
+
+    oper.beginWindow(0);
+
+    // input test json string
+    String inputJson = " {   \"@timestamp\":\"2013-09-25T19:37:23.569Z\""
+        + "      ,\"@version\":\"1\""
+        + "          ,\"type\":\"apache-logs\""
+        + "          ,\"host\":\"node1001\""
+        + "      ,\"clientip\":192.168.150.120"
+        + "          ,\"verb\":\"GET\""
+        + "       ,\"request\":\"/reset.css\""
+        + "   ,\"httpversion\":\"1.1\""
+        + "      ,\"response\":200"
+        + "     ,\"agentinfo\": {\"browser\":Firefox"
+        + "                          ,\"os\": {    \"name\":\"Ubuntu\""
+        + "                                    ,\"version\":\"10.04\""
+        + "                                   }"
+        + "                     }"
+        + "         ,\"bytes\":909.1"
+        + " }";
+
+    byte[] inputByteArray = inputJson.getBytes();
+
+    // run the operator for the same string 1000 times
+    int numtuples = 1000;
+    for (int i = 0; i < numtuples; i++) {
+      oper.input.process(inputByteArray);
     }
 
+    oper.endWindow();
+
+    // assert that the number of the operator generates is 1000
+    Assert.assertEquals("number emitted tuples", numtuples, mapSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", numtuples, jsonObjectSink.collectedTuples.size());
+    Assert.assertEquals("number emitted tuples", numtuples, flatMapSink.collectedTuples.size());
+
+    // assert that value for one of the keys in any one of the objects from mapSink is as expected
+    Object map = mapSink.collectedTuples.get(510);
+    String expectedClientip = "192.168.150.120";
+    Assert.assertEquals("emitted tuple", expectedClientip, ((Map)map).get("clientip"));
+
+    // assert that value for one of the keys in any one of the objects from jsonObjectSink is as expected
+    Object jsonObject = jsonObjectSink.collectedTuples.get(433);
+    Number expectedResponse = 200;
+    Assert.assertEquals("emitted tuple", expectedResponse, ((JSONObject)jsonObject).get("response"));
+
+    // assert that value for one of the keys in any one of the objects from flatMapSink is as expected
+    Map flatMap = (Map)flatMapSink.collectedTuples.get(511);
+    String expectedBrowser = "Firefox";
+    String expectedOsName = "Ubuntu";
+    Assert.assertEquals("emitted tuple", expectedBrowser, flatMap.get("agentinfo.browser"));
+    Assert.assertEquals("emitted tuple", expectedOsName, flatMap.get("agentinfo.os.name"));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/KeyPairToHashMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/KeyPairToHashMapTest.java b/library/src/test/java/com/datatorrent/lib/stream/KeyPairToHashMapTest.java
index 2b5a583..2d0595f 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/KeyPairToHashMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/KeyPairToHashMapTest.java
@@ -32,28 +32,28 @@ import com.datatorrent.lib.util.KeyValPair;
 public class KeyPairToHashMapTest
 {
 
-	/**
-	 * Test oper pass through. The Object passed is not relevant
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		KeyValPairToHashMap oper = new KeyValPairToHashMap();
-		CountTestSink mapSink = new CountTestSink();
-
-		oper.map.setSink(mapSink);
-
-		oper.beginWindow(0);
-		KeyValPair<String, String> input = new KeyValPair<String, String>("a", "1");
-
-		// Same input object can be used as the oper is just pass through
-		int numtuples = 1000;
-		for (int i = 0; i < numtuples; i++) {
-			oper.keyval.process(input);
-		}
-		oper.endWindow();
-
-		Assert.assertEquals("number emitted tuples", numtuples, mapSink.count);
-	}
+  /**
+   * Test oper pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    KeyValPairToHashMap oper = new KeyValPairToHashMap();
+    CountTestSink mapSink = new CountTestSink();
+
+    oper.map.setSink(mapSink);
+
+    oper.beginWindow(0);
+    KeyValPair<String, String> input = new KeyValPair<String, String>("a", "1");
+
+    // Same input object can be used as the oper is just pass through
+    int numtuples = 1000;
+    for (int i = 0; i < numtuples; i++) {
+      oper.keyval.process(input);
+    }
+    oper.endWindow();
+
+    Assert.assertEquals("number emitted tuples", numtuples, mapSink.count);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/RoundRobinHashMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/RoundRobinHashMapTest.java b/library/src/test/java/com/datatorrent/lib/stream/RoundRobinHashMapTest.java
index c26c8d0..d78e369 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/RoundRobinHashMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/RoundRobinHashMapTest.java
@@ -33,59 +33,59 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 public class RoundRobinHashMapTest
 {
 
-	private static Logger log = LoggerFactory
-			.getLogger(RoundRobinHashMapTest.class);
+  private static Logger log = LoggerFactory
+      .getLogger(RoundRobinHashMapTest.class);
 
-	/**
-	 * Test operator pass through. The Object passed is not relevant
-	 */
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testNodeProcessing() throws Exception
-	{
-		RoundRobinHashMap oper = new RoundRobinHashMap();
-		CollectorTestSink mapSink = new CollectorTestSink();
+  /**
+   * Test operator pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    RoundRobinHashMap oper = new RoundRobinHashMap();
+    CollectorTestSink mapSink = new CollectorTestSink();
 
-		String[] keys = new String[3];
-		keys[0] = "a";
-		keys[1] = "b";
-		keys[2] = "c";
+    String[] keys = new String[3];
+    keys[0] = "a";
+    keys[1] = "b";
+    keys[2] = "c";
 
-		oper.setKeys(keys);
-		oper.map.setSink(mapSink);
-		oper.beginWindow(0);
+    oper.setKeys(keys);
+    oper.map.setSink(mapSink);
+    oper.beginWindow(0);
 
-		HashMap<String, Integer> t1 = new HashMap<String, Integer>();
-		t1.put("a", 0);
-		t1.put("b", 1);
-		t1.put("c", 2);
-		HashMap<String, Integer> t2 = new HashMap<String, Integer>();
-		t2.put("a", 3);
-		t2.put("b", 4);
-		t2.put("c", 5);
-		HashMap<String, Integer> t3 = new HashMap<String, Integer>();
-		t3.put("a", 6);
-		t3.put("b", 7);
-		t3.put("c", 8);
+    HashMap<String, Integer> t1 = new HashMap<String, Integer>();
+    t1.put("a", 0);
+    t1.put("b", 1);
+    t1.put("c", 2);
+    HashMap<String, Integer> t2 = new HashMap<String, Integer>();
+    t2.put("a", 3);
+    t2.put("b", 4);
+    t2.put("c", 5);
+    HashMap<String, Integer> t3 = new HashMap<String, Integer>();
+    t3.put("a", 6);
+    t3.put("b", 7);
+    t3.put("c", 8);
 
-		HashMap<String, Integer> t4 = new HashMap<String, Integer>();
-		t4.put("a", 9);
-		t4.put("b", 10);
-		t4.put("c", 11);
+    HashMap<String, Integer> t4 = new HashMap<String, Integer>();
+    t4.put("a", 9);
+    t4.put("b", 10);
+    t4.put("c", 11);
 
-		// Same input object can be used as the oper is just pass through
-		int numtuples = 12;
-		for (int i = 0; i < numtuples; i++) {
-			oper.data.process(i);
-		}
-		oper.endWindow();
+    // Same input object can be used as the oper is just pass through
+    int numtuples = 12;
+    for (int i = 0; i < numtuples; i++) {
+      oper.data.process(i);
+    }
+    oper.endWindow();
 
-		Assert.assertEquals("number emitted tuples", numtuples / 3,
-				mapSink.collectedTuples.size());
-		log.debug(mapSink.collectedTuples.toString());
-		Assert.assertEquals("tuple 1", t1, mapSink.collectedTuples.get(0));
-		Assert.assertEquals("tuple 2", t2, mapSink.collectedTuples.get(1));
-		Assert.assertEquals("tuple 3", t3, mapSink.collectedTuples.get(2));
-		Assert.assertEquals("tuple 4", t4, mapSink.collectedTuples.get(3));
-	}
+    Assert.assertEquals("number emitted tuples", numtuples / 3,
+        mapSink.collectedTuples.size());
+    log.debug(mapSink.collectedTuples.toString());
+    Assert.assertEquals("tuple 1", t1, mapSink.collectedTuples.get(0));
+    Assert.assertEquals("tuple 2", t2, mapSink.collectedTuples.get(1));
+    Assert.assertEquals("tuple 3", t3, mapSink.collectedTuples.get(2));
+    Assert.assertEquals("tuple 4", t4, mapSink.collectedTuples.get(3));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/StreamDuplicaterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/StreamDuplicaterTest.java b/library/src/test/java/com/datatorrent/lib/stream/StreamDuplicaterTest.java
index 1c05b6c..7586c70 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/StreamDuplicaterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/StreamDuplicaterTest.java
@@ -28,34 +28,35 @@ import com.datatorrent.lib.testbench.CountTestSink;
  * Benchmarks: Currently does about ?? Million tuples/sec in debugging environment. Need to test on larger nodes<br>
  * <br>
  */
-public class StreamDuplicaterTest {
-
-    /**
-     * Test oper pass through. The Object passed is not relevant
-     */
-    @SuppressWarnings({ "rawtypes", "unchecked" })
-    @Test
-    public void testNodeProcessing() throws Exception
-    {
-      StreamDuplicater oper = new StreamDuplicater();
-      CountTestSink mergeSink1 = new CountTestSink();
-      CountTestSink mergeSink2 = new CountTestSink();
-
-      oper.out1.setSink(mergeSink1);
-      oper.out2.setSink(mergeSink2);
-
-      oper.beginWindow(0);
-      int numtuples = 1000;
-      Integer input = new Integer(0);
-      // Same input object can be used as the oper is just pass through
-      for (int i = 0; i < numtuples; i++) {
-        oper.data.process(input);
-      }
-
-      oper.endWindow();
-
-      // One for each key
-      Assert.assertEquals("number emitted tuples", numtuples, mergeSink1.count);
-      Assert.assertEquals("number emitted tuples", numtuples, mergeSink2.count);
+public class StreamDuplicaterTest
+{
+
+  /**
+   * Test oper pass through. The Object passed is not relevant
+   */
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
+    StreamDuplicater oper = new StreamDuplicater();
+    CountTestSink mergeSink1 = new CountTestSink();
+    CountTestSink mergeSink2 = new CountTestSink();
+
+    oper.out1.setSink(mergeSink1);
+    oper.out2.setSink(mergeSink2);
+
+    oper.beginWindow(0);
+    int numtuples = 1000;
+    Integer input = 0;
+    // Same input object can be used as the oper is just pass through
+    for (int i = 0; i < numtuples; i++) {
+      oper.data.process(input);
     }
+
+    oper.endWindow();
+
+    // One for each key
+    Assert.assertEquals("number emitted tuples", numtuples, mergeSink1.count);
+    Assert.assertEquals("number emitted tuples", numtuples, mergeSink2.count);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/stream/StreamMergerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/StreamMergerTest.java b/library/src/test/java/com/datatorrent/lib/stream/StreamMergerTest.java
index fe626f4..995dc6f 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/StreamMergerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/StreamMergerTest.java
@@ -43,7 +43,7 @@ public class StreamMergerTest
 
     oper.beginWindow(0);
     int numtuples = 500;
-    Integer input = new Integer(0);
+    Integer input = 0;
     // Same input object can be used as the oper is just pass through
     for (int i = 0; i < numtuples; i++) {
       oper.data1.process(input);
@@ -51,6 +51,6 @@ public class StreamMergerTest
     }
 
     oper.endWindow();
-    Assert.assertEquals("number emitted tuples", numtuples*2, mergeSink.count);
+    Assert.assertEquals("number emitted tuples", numtuples * 2, mergeSink.count);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/DeleteOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/DeleteOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/DeleteOperatorTest.java
index b0d3c01..1f29d1d 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/DeleteOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/DeleteOperatorTest.java
@@ -21,8 +21,9 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.streamquery.DeleteOperator;
 import com.datatorrent.lib.streamquery.condition.EqualValueCondition;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
@@ -31,44 +32,46 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class DeleteOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-	DeleteOperator oper = new DeleteOperator();
-  	
-  	EqualValueCondition  condition = new EqualValueCondition();
-  	condition.addEqualValue("a", 1);
-  	oper.setCondition(condition);
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    DeleteOperator oper = new DeleteOperator();
+
+    EqualValueCondition  condition = new EqualValueCondition();
+    condition.addEqualValue("a", 1);
+    oper.setCondition(condition);
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(DeleteOperatorTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/FullOuterJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/FullOuterJoinOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/FullOuterJoinOperatorTest.java
index 97d587b..728fb96 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/FullOuterJoinOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/FullOuterJoinOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.condition.Condition;
 import com.datatorrent.lib.streamquery.condition.JoinColumnEqualCondition;
@@ -29,16 +31,16 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 
 public class FullOuterJoinOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-		OuterJoinOperator oper = new OuterJoinOperator();	
-		oper.setFullJoin(true);
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
+    // create operator
+    OuterJoinOperator oper = new OuterJoinOperator();
+    oper.setFullJoin(true);
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
     // set column join condition  
     Condition cond = new JoinColumnEqualCondition("a", "a");
     oper.setJoinCondition(cond);
@@ -46,43 +48,46 @@ public class FullOuterJoinOperatorTest
     // add columns  
     oper.selectTable1Column(new ColumnIndex("b", null));
     oper.selectTable2Column(new ColumnIndex("c", null));
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 2);
-  	tuple.put("b", 11);
-  	tuple.put("c", 12);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 7);
-  	tuple.put("c", 8);
-  	oper.inport2.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport2.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 2);
+    tuple.put("b", 11);
+    tuple.put("c", 12);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 7);
+    tuple.put("c", 8);
+    oper.inport2.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport2.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(FullOuterJoinOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/GroupByOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/GroupByOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/GroupByOperatorTest.java
index 0d4c939..714f93b 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/GroupByOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/GroupByOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.condition.EqualValueCondition;
 import com.datatorrent.lib.streamquery.function.SumFunction;
@@ -32,58 +34,61 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class GroupByOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlGroupBy()
   {
-  	// create operator   
-	  GroupByHavingOperator oper = new GroupByHavingOperator();
-  	oper.addColumnGroupByIndex(new ColumnIndex("b", null));
-  	try {
+    // create operator
+    GroupByHavingOperator oper = new GroupByHavingOperator();
+    oper.addColumnGroupByIndex(new ColumnIndex("b", null));
+    try {
       oper.addAggregateIndex(new SumFunction("c", null));
     } catch (Exception e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
       return;
     }
-  	
-  	EqualValueCondition  condition = new EqualValueCondition();
-  	condition.addEqualValue("a", 1);
-  	oper.setCondition(condition);
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 1);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 2);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
+
+    EqualValueCondition  condition = new EqualValueCondition();
+    condition.addEqualValue("a", 1);
+    oper.setCondition(condition);
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 1);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 2);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
     tuple = new HashMap<String, Object>();
     tuple.put("a", 1);
     tuple.put("b", 2);
     tuple.put("c", 7);
     oper.inport.process(tuple);
     
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(GroupByOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/HavingOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/HavingOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/HavingOperatorTest.java
index 7ccb2ed..e11723d 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/HavingOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/HavingOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.condition.EqualValueCondition;
 import com.datatorrent.lib.streamquery.condition.HavingCompareValue;
@@ -35,57 +37,60 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class HavingOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlGroupBy() throws Exception
   {
-  	// create operator   
-	  GroupByHavingOperator oper = new GroupByHavingOperator();
-  	oper.addColumnGroupByIndex(new ColumnIndex("b", null));
-  	FunctionIndex sum = new SumFunction("c", null);
+    // create operator
+    GroupByHavingOperator oper = new GroupByHavingOperator();
+    oper.addColumnGroupByIndex(new ColumnIndex("b", null));
+    FunctionIndex sum = new SumFunction("c", null);
     oper.addAggregateIndex(sum);
 
     // create having condition
     HavingCondition having = new HavingCompareValue<Double>(sum, 6.0, 0);
     oper.addHavingCondition(having);
-  	
-  	EqualValueCondition  condition = new EqualValueCondition();
-  	condition.addEqualValue("a", 1);
-  	oper.setCondition(condition);
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 1);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 2);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
+
+    EqualValueCondition  condition = new EqualValueCondition();
+    condition.addEqualValue("a", 1);
+    oper.setCondition(condition);
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 1);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 2);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
     tuple = new HashMap<String, Object>();
     tuple.put("a", 1);
     tuple.put("b", 2);
     tuple.put("c", 7);
     oper.inport.process(tuple);
     
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(HavingOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/InnerJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/InnerJoinOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/InnerJoinOperatorTest.java
index 2f14f16..8a022ee 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/InnerJoinOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/InnerJoinOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.condition.Condition;
 import com.datatorrent.lib.streamquery.condition.JoinColumnEqualCondition;
@@ -34,53 +36,56 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class InnerJoinOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-		InnerJoinOperator oper = new InnerJoinOperator();	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	// set column join condition  
-  	Condition cond = new JoinColumnEqualCondition("a", "a");
-  	oper.setJoinCondition(cond);
-  	
-  	// add columns  
-  	oper.selectTable1Column(new ColumnIndex("b", null));
-  	oper.selectTable2Column(new ColumnIndex("c", null));
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 7);
-  	tuple.put("c", 8);
-  	oper.inport2.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport2.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    InnerJoinOperator oper = new InnerJoinOperator();
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    // set column join condition
+    Condition cond = new JoinColumnEqualCondition("a", "a");
+    oper.setJoinCondition(cond);
+
+    // add columns
+    oper.selectTable1Column(new ColumnIndex("b", null));
+    oper.selectTable2Column(new ColumnIndex("c", null));
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 7);
+    tuple.put("c", 8);
+    oper.inport2.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport2.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(InnerJoinOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/LeftOuterJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/LeftOuterJoinOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/LeftOuterJoinOperatorTest.java
index 32e5b13..aa25e87 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/LeftOuterJoinOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/LeftOuterJoinOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.condition.Condition;
 import com.datatorrent.lib.streamquery.condition.JoinColumnEqualCondition;
@@ -29,15 +31,15 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 
 public class LeftOuterJoinOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-		OuterJoinOperator oper = new OuterJoinOperator();	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
+    // create operator
+    OuterJoinOperator oper = new OuterJoinOperator();
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
     // set column join condition  
     Condition cond = new JoinColumnEqualCondition("a", "a");
     oper.setJoinCondition(cond);
@@ -45,43 +47,46 @@ public class LeftOuterJoinOperatorTest
     // add columns  
     oper.selectTable1Column(new ColumnIndex("b", null));
     oper.selectTable2Column(new ColumnIndex("c", null));
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 2);
-  	tuple.put("b", 11);
-  	tuple.put("c", 12);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 7);
-  	tuple.put("c", 8);
-  	oper.inport2.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport2.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 2);
+    tuple.put("b", 11);
+    tuple.put("c", 12);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 7);
+    tuple.put("c", 8);
+    oper.inport2.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport2.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(LeftOuterJoinOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/OrderByOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/OrderByOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/OrderByOperatorTest.java
index b233290..2d7ba87 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/OrderByOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/OrderByOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
@@ -29,60 +31,63 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class OrderByOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// craete operator   
+    // craete operator
     OrderByOperator oper = new OrderByOperator();
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	oper.addOrderByRule(new OrderByRule<Integer>("b"));
-  	oper.setDescending(true);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("c", 2);
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 2);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 2);
-  	tuple.put("b", 6);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 4);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 8);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+    oper.addOrderByRule(new OrderByRule<Integer>("b"));
+    oper.setDescending(true);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("c", 2);
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 2);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 2);
+    tuple.put("b", 6);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 4);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 8);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(OrderByOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/RightOuterJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/RightOuterJoinOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/RightOuterJoinOperatorTest.java
index f99ee25..3a57427 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/RightOuterJoinOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/RightOuterJoinOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.condition.Condition;
 import com.datatorrent.lib.streamquery.condition.JoinColumnEqualCondition;
@@ -29,16 +31,16 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 
 public class RightOuterJoinOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-		OuterJoinOperator oper = new OuterJoinOperator();	
-		oper.setRighttJoin();
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
+    // create operator
+    OuterJoinOperator oper = new OuterJoinOperator();
+    oper.setRighttJoin();
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
     // set column join condition  
     Condition cond = new JoinColumnEqualCondition("a", "a");
     oper.setJoinCondition(cond);
@@ -46,44 +48,47 @@ public class RightOuterJoinOperatorTest
     // add columns  
     oper.selectTable1Column(new ColumnIndex("b", null));
     oper.selectTable2Column(new ColumnIndex("c", null));
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport1.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport1.process(tuple);
-  	
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 7);
-  	tuple.put("c", 8);
-  	oper.inport2.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport2.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport1.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport1.process(tuple);
+
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 7);
+    tuple.put("c", 8);
+    oper.inport2.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport2.process(tuple);
+
+    tuple = new HashMap<String, Object>();
     tuple.put("a", 2);
     tuple.put("b", 11);
     tuple.put("c", 12);
     oper.inport2.process(tuple);
     
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(RightOuterJoinOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/SelectOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/SelectOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/SelectOperatorTest.java
index 3ac18f8..8e6620e 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/SelectOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/SelectOperatorTest.java
@@ -21,8 +21,9 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.condition.EqualValueCondition;
 import com.datatorrent.lib.streamquery.index.ColumnIndex;
 import com.datatorrent.lib.testbench.CollectorTestSink;
@@ -32,46 +33,49 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SelectOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new ColumnIndex("b", null));
-  	oper.addIndex(new ColumnIndex("c", null));
-  	
-  	EqualValueCondition  condition = new EqualValueCondition();
-  	condition.addEqualValue("a", 1);
-  	oper.setCondition(condition);
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new ColumnIndex("b", null));
+    oper.addIndex(new ColumnIndex("c", null));
+
+    EqualValueCondition  condition = new EqualValueCondition();
+    condition.addEqualValue("a", 1);
+    oper.setCondition(condition);
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SelectOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/SelectTopOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/SelectTopOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/SelectTopOperatorTest.java
index 8c894d1..c92c6c1 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/SelectTopOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/SelectTopOperatorTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
@@ -54,7 +56,10 @@ public class SelectTopOperatorTest
     tuple.put("c", 6);
     oper.inport.process(tuple);
     oper.endWindow();
-    
-    System.out.println(sink.collectedTuples.toString());
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SelectTopOperatorTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/UpdateOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/UpdateOperatorTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/UpdateOperatorTest.java
index 70713db..42af56b 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/UpdateOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/UpdateOperatorTest.java
@@ -21,52 +21,56 @@ package com.datatorrent.lib.streamquery;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import com.datatorrent.lib.streamquery.UpdateOperator;
 import com.datatorrent.lib.streamquery.condition.EqualValueCondition;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
 public class UpdateOperatorTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-		UpdateOperator oper = new UpdateOperator();
-  	
-  	EqualValueCondition  condition = new EqualValueCondition();
-  	condition.addEqualValue("a", 1);
-  	oper.setCondition(condition);
-  	oper.addUpdate("c", 100);
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    UpdateOperator oper = new UpdateOperator();
+
+    EqualValueCondition  condition = new EqualValueCondition();
+    condition.addEqualValue("a", 1);
+    oper.setCondition(condition);
+    oper.addUpdate("c", 100);
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(UpdateOperatorTest.class);
+
 }


[02/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/BetweenConditionTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/BetweenConditionTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/BetweenConditionTest.java
index 0d0ef05..b0500eb 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/BetweenConditionTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/BetweenConditionTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.condition.BetweenCondition;
@@ -32,52 +34,54 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class BetweenConditionTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new ColumnIndex("b", null));
-  	oper.addIndex(new ColumnIndex("c", null));
-  	
-  	BetweenCondition cond = new BetweenCondition("a", 0, 2); 
-  	oper.setCondition(cond);
-  	
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 2);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new ColumnIndex("b", null));
+    oper.addIndex(new ColumnIndex("c", null));
+
+    BetweenCondition cond = new BetweenCondition("a", 0, 2);
+    oper.setCondition(cond);
+
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 2);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
     tuple.put("a", 3);
     tuple.put("b", 7);
     tuple.put("c", 8);
     oper.inport.process(tuple);
     
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(BetweenConditionTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/CompoundConditionTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/CompoundConditionTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/CompoundConditionTest.java
index 55b36a7..2f92c9b 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/CompoundConditionTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/CompoundConditionTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.condition.CompoundCondition;
@@ -33,56 +35,58 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class CompoundConditionTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new ColumnIndex("b", null));
-  	oper.addIndex(new ColumnIndex("c", null));
-  	
-  	EqualValueCondition  left = new EqualValueCondition();
-  	left.addEqualValue("a", 1);
-  	EqualValueCondition  right = new EqualValueCondition();
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new ColumnIndex("b", null));
+    oper.addIndex(new ColumnIndex("c", null));
+
+    EqualValueCondition  left = new EqualValueCondition();
+    left.addEqualValue("a", 1);
+    EqualValueCondition  right = new EqualValueCondition();
     right.addEqualValue("b", 1);
-  	 
-  	oper.setCondition(new CompoundCondition(left, right));
-  	
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
+
+    oper.setCondition(new CompoundCondition(left, right));
+
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
     tuple.put("a", 3);
     tuple.put("b", 7);
     tuple.put("c", 8);
     oper.inport.process(tuple);
     
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(CompoundConditionTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/InConditionTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/InConditionTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/InConditionTest.java
index b43b6dd..d235a9c 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/InConditionTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/InConditionTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.condition.InCondition;
@@ -32,54 +34,57 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class InConditionTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new ColumnIndex("b", null));
-  	oper.addIndex(new ColumnIndex("c", null));
-  	
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new ColumnIndex("b", null));
+    oper.addIndex(new ColumnIndex("c", null));
+
     InCondition cond = new InCondition("a"); 
     cond.addInValue(0);
     cond.addInValue(1);
-  	oper.setCondition(cond);
-  	
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 2);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
+    oper.setCondition(cond);
+
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 2);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
     tuple.put("a", 3);
     tuple.put("b", 7);
     tuple.put("c", 8);
     oper.inport.process(tuple);
     
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(InConditionTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/LikeConditionTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/LikeConditionTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/LikeConditionTest.java
index 4d67708..b4d8539 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/LikeConditionTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/LikeConditionTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.condition.LikeCondition;
@@ -32,45 +34,48 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class LikeConditionTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new ColumnIndex("b", null));
-  	oper.addIndex(new ColumnIndex("c", null));
-  	
-  	LikeCondition  condition = new LikeCondition("a", "test*");
-  	oper.setCondition(condition);
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", "testing");
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", "null");
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", "testall");
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new ColumnIndex("b", null));
+    oper.addIndex(new ColumnIndex("c", null));
+
+    LikeCondition  condition = new LikeCondition("a", "test*");
+    oper.setCondition(condition);
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", "testing");
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", "null");
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", "testall");
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(LikeConditionTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/NegateIndexTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/NegateIndexTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/NegateIndexTest.java
index b81e842..3ccb03e 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/NegateIndexTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/NegateIndexTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.index.NegateExpression;
@@ -31,41 +33,43 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class NegateIndexTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new NegateExpression("b", null));
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new NegateExpression("b", null));
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(NegateIndexTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectAverageTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectAverageTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectAverageTest.java
index eac0657..5279dac 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectAverageTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectAverageTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectFunctionOperator;
 import com.datatorrent.lib.streamquery.function.AverageFunction;
@@ -31,41 +33,43 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SelectAverageTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectFunctionOperator oper = new SelectFunctionOperator();
-  	oper.addSqlFunction(new AverageFunction("b", null));
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectFunctionOperator oper = new SelectFunctionOperator();
+    oper.addSqlFunction(new AverageFunction("b", null));
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SelectAverageTest.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectCountTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectCountTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectCountTest.java
index 243c946..9c235e1 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectCountTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectCountTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectFunctionOperator;
 import com.datatorrent.lib.streamquery.function.CountFunction;
@@ -31,41 +33,44 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SelectCountTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectFunctionOperator oper = new SelectFunctionOperator();
-  	oper.addSqlFunction(new CountFunction("b", null));
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", null);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", null);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectFunctionOperator oper = new SelectFunctionOperator();
+    oper.addSqlFunction(new CountFunction("b", null));
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", null);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", null);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SelectCountTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectFirstLastTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectFirstLastTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectFirstLastTest.java
index fe9ed07..c7b56fe 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectFirstLastTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectFirstLastTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectFunctionOperator;
 import com.datatorrent.lib.streamquery.function.FirstLastFunction;
@@ -31,41 +33,44 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SelectFirstLastTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectFunctionOperator oper = new SelectFunctionOperator();
-  	oper.addSqlFunction(new FirstLastFunction("b", null, false));
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", null);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", null);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectFunctionOperator oper = new SelectFunctionOperator();
+    oper.addSqlFunction(new FirstLastFunction("b", null, false));
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", null);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", null);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SelectFirstLastTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectMaxMinTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectMaxMinTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectMaxMinTest.java
index 6b06848..e57554a 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectMaxMinTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SelectMaxMinTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectFunctionOperator;
 import com.datatorrent.lib.streamquery.function.MaxMinFunction;
@@ -31,41 +33,44 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SelectMaxMinTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectFunctionOperator oper = new SelectFunctionOperator();
-  	oper.addSqlFunction(new MaxMinFunction("b", null, false));
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectFunctionOperator oper = new SelectFunctionOperator();
+    oper.addSqlFunction(new MaxMinFunction("b", null, false));
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SelectMaxMinTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SumIndexTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SumIndexTest.java b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SumIndexTest.java
index c8e17c0..f9f1e10 100644
--- a/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SumIndexTest.java
+++ b/library/src/test/java/com/datatorrent/lib/streamquery/advanced/SumIndexTest.java
@@ -21,6 +21,8 @@ package com.datatorrent.lib.streamquery.advanced;
 import java.util.HashMap;
 
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.datatorrent.lib.streamquery.SelectOperator;
 import com.datatorrent.lib.streamquery.index.SumExpression;
@@ -31,41 +33,44 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  */
 public class SumIndexTest
 {
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Test
   public void testSqlSelect()
   {
-  	// create operator   
-  	SelectOperator oper = new SelectOperator();
-  	oper.addIndex(new SumExpression("b", "c", null));
-  	
-  	CollectorTestSink sink = new CollectorTestSink();
-  	oper.outport.setSink(sink);
-  	
-  	oper.setup(null);
-  	oper.beginWindow(1);
-  	
-  	HashMap<String, Object> tuple = new HashMap<String, Object>();
-  	tuple.put("a", 0);
-  	tuple.put("b", 1);
-  	tuple.put("c", 2);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 3);
-  	tuple.put("c", 4);
-  	oper.inport.process(tuple);
-  	
-  	tuple = new HashMap<String, Object>();
-  	tuple.put("a", 1);
-  	tuple.put("b", 5);
-  	tuple.put("c", 6);
-  	oper.inport.process(tuple);
-  	
-  	oper.endWindow();
-  	oper.teardown();
-  	
-  	System.out.println(sink.collectedTuples.toString());
+    // create operator
+    SelectOperator oper = new SelectOperator();
+    oper.addIndex(new SumExpression("b", "c", null));
+
+    CollectorTestSink sink = new CollectorTestSink();
+    oper.outport.setSink(sink);
+
+    oper.setup(null);
+    oper.beginWindow(1);
+
+    HashMap<String, Object> tuple = new HashMap<String, Object>();
+    tuple.put("a", 0);
+    tuple.put("b", 1);
+    tuple.put("c", 2);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 3);
+    tuple.put("c", 4);
+    oper.inport.process(tuple);
+
+    tuple = new HashMap<String, Object>();
+    tuple.put("a", 1);
+    tuple.put("b", 5);
+    tuple.put("c", 6);
+    oper.inport.process(tuple);
+
+    oper.endWindow();
+    oper.teardown();
+
+    LOG.debug("{}", sink.collectedTuples);
   }
+
+  private static final Logger LOG = LoggerFactory.getLogger(SumIndexTest.class);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java b/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
index 75ca86d..4f231cb 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
@@ -27,10 +27,11 @@ import javax.jms.MessageProducer;
 import javax.jms.Session;
 import javax.jms.TextMessage;
 
-import org.apache.activemq.ActiveMQConnectionFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.activemq.ActiveMQConnectionFactory;
+
 /**
  * This is the message generator outside of Malhar/Hadoop. This generates data
  * and send to ActiveMQ message bus so that Malhar input adapter for ActiveMQ
@@ -39,109 +40,109 @@ import org.slf4j.LoggerFactory;
  */
 public class ActiveMQMessageGenerator
 {
-	private static final Logger logger = LoggerFactory
-			.getLogger(ActiveMQMessageGenerator.class);
-	private Connection connection;
-	private Session session;
-	private Destination destination;
-	private MessageProducer producer;
-	public HashMap<Integer, String> sendData = new HashMap<Integer, String>();
-	public int sendCount = 0;
-	private int debugMessageCount = 0;
-	private String user = "";
-	private String password = "";
-	private String url = "tcp://localhost:61617";
-	private int ackMode = Session.CLIENT_ACKNOWLEDGE;
-	private String subject = "TEST.FOO";
-	private int messageSize = 255;
-	private long maximumSendMessages = 20; // 0 means unlimitted, this has to run
-																				 // in seperate thread for unlimitted
-	private boolean topic = false;
-	private boolean transacted = false;
-	private boolean verbose = false;
-
-	public ActiveMQMessageGenerator()
-	{
-	}
-
-	public void setDebugMessageCount(int count)
-	{
-		debugMessageCount = count;
-	}
-
-	/**
-	 * Setup connection, producer, consumer so on.
-	 * 
-	 * @throws JMSException
-	 */
-	public void setupConnection() throws JMSException
-	{
-		// Create connection
-		ActiveMQConnectionFactory connectionFactory;
-		connectionFactory = new ActiveMQConnectionFactory(user, password, url);
-
-		connection = connectionFactory.createConnection();
-		connection.start();
-
-		// Create session
-		session = connection.createSession(transacted, ackMode);
-
-		// Create destination
-		destination = topic ? session.createTopic(subject) : session
-				.createQueue(subject);
-
-		// Create producer
-		producer = session.createProducer(destination);
-	}
-
-	/**
-	 * Generate message and send it to ActiveMQ message bus.
-	 * 
-	 * @throws Exception
-	 */
-	public void sendMessage() throws Exception
-	{
-		for (int i = 1; i <= maximumSendMessages || maximumSendMessages == 0; i++) {
-
-			// Silly message
-			String myMsg = "My TestMessage " + i;
-			// String myMsg = "My TestMessage " + i + " sent at " + new Date();
-
-			if (myMsg.length() > messageSize) {
-				myMsg = myMsg.substring(0, messageSize);
-			}
-
-			TextMessage message = session.createTextMessage(myMsg);
-
-			producer.send(message);
-			// store it for testing later
-			sendData.put(i, myMsg);
-			sendCount++;
-
-			if (verbose) {
-				String msg = message.getText();
-				if (msg.length() > messageSize) {
-					msg = msg.substring(0, messageSize) + "...";
-				}
-				if (i <= debugMessageCount) {
-					System.out.println("[" + this + "] Sending message from generator: '"
-							+ msg + "'");
-				}
-			}
-		}
-	}
-
-	/**
-	 * Close connection resources.
-	 */
-	public void closeConnection()
-	{
-		try {
-			producer.close();
-			session.close();
-			connection.close();
-		} catch (JMSException ex) {
-			logger.debug(ex.getLocalizedMessage());
-		}
-	}
+  private static final Logger logger = LoggerFactory
+      .getLogger(ActiveMQMessageGenerator.class);
+  private Connection connection;
+  private Session session;
+  private Destination destination;
+  private MessageProducer producer;
+  public HashMap<Integer, String> sendData = new HashMap<Integer, String>();
+  public int sendCount = 0;
+  private int debugMessageCount = 0;
+  private String user = "";
+  private String password = "";
+  private String url = "tcp://localhost:61617";
+  private int ackMode = Session.CLIENT_ACKNOWLEDGE;
+  private String subject = "TEST.FOO";
+  private int messageSize = 255;
+  private long maximumSendMessages = 20; // 0 means unlimitted, this has to run
+                                         // in seperate thread for unlimitted
+  private boolean topic = false;
+  private boolean transacted = false;
+  private boolean verbose = false;
+
+  public ActiveMQMessageGenerator()
+  {
+  }
+
+  public void setDebugMessageCount(int count)
+  {
+    debugMessageCount = count;
+  }
+
+  /**
+   * Setup connection, producer, consumer so on.
+   *
+   * @throws JMSException
+   */
+  public void setupConnection() throws JMSException
+  {
+    // Create connection
+    ActiveMQConnectionFactory connectionFactory;
+    connectionFactory = new ActiveMQConnectionFactory(user, password, url);
+
+    connection = connectionFactory.createConnection();
+    connection.start();
+
+    // Create session
+    session = connection.createSession(transacted, ackMode);
+
+    // Create destination
+    destination = topic ? session.createTopic(subject) : session
+        .createQueue(subject);
+
+    // Create producer
+    producer = session.createProducer(destination);
+  }
+
+  /**
+   * Generate message and send it to ActiveMQ message bus.
+   *
+   * @throws Exception
+   */
+  public void sendMessage() throws Exception
+  {
+    for (int i = 1; i <= maximumSendMessages || maximumSendMessages == 0; i++) {
+
+      // Silly message
+      String myMsg = "My TestMessage " + i;
+      // String myMsg = "My TestMessage " + i + " sent at " + new Date();
+
+      if (myMsg.length() > messageSize) {
+        myMsg = myMsg.substring(0, messageSize);
+      }
+
+      TextMessage message = session.createTextMessage(myMsg);
+
+      producer.send(message);
+      // store it for testing later
+      sendData.put(i, myMsg);
+      sendCount++;
+
+      if (verbose) {
+        String msg = message.getText();
+        if (msg.length() > messageSize) {
+          msg = msg.substring(0, messageSize) + "...";
+        }
+        if (i <= debugMessageCount) {
+          System.out.println("[" + this + "] Sending message from generator: '"
+              + msg + "'");
+        }
+      }
+    }
+  }
+
+  /**
+   * Close connection resources.
+   */
+  public void closeConnection()
+  {
+    try {
+      producer.close();
+      session.close();
+      connection.close();
+    } catch (JMSException ex) {
+      logger.debug(ex.getLocalizedMessage());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/EventClassifierTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/EventClassifierTest.java b/library/src/test/java/com/datatorrent/lib/testbench/EventClassifierTest.java
index 2eb1219..e9b4873 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/EventClassifierTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/EventClassifierTest.java
@@ -40,217 +40,217 @@ import com.datatorrent.api.Sink;
  * <br>
  * Validates all DRC checks of the node<br>
  */
-public class EventClassifierTest {
+public class EventClassifierTest
+{
 
-    private static Logger LOG = LoggerFactory.getLogger(EventClassifier.class);
+  private static Logger LOG = LoggerFactory.getLogger(EventClassifier.class);
 
-    @SuppressWarnings("rawtypes")
-    class TestSink implements Sink {
+  @SuppressWarnings("rawtypes")
+  class TestSink implements Sink
+  {
 
-        HashMap<String, Integer> collectedTuples = new HashMap<String, Integer>();
-        HashMap<String, Double> collectedTupleValues = new HashMap<String, Double>();
+    HashMap<String, Integer> collectedTuples = new HashMap<String, Integer>();
+    HashMap<String, Double> collectedTupleValues = new HashMap<String, Double>();
 
-        int count = 0;
-        boolean dohash = true;
+    int count = 0;
+    boolean dohash = true;
 
-        /**
-         *
-         * @param payload
-         */
-        @SuppressWarnings("unchecked")
-        @Override
-        public void put(Object payload) {
-          count++;
-          if (dohash) {
-            HashMap<String, Double> tuple = (HashMap<String, Double>)payload;
-            for (Map.Entry<String, Double> e : tuple.entrySet()) {
-              Integer ival = collectedTuples.get(e.getKey());
-              if (ival == null) {
-                ival = new Integer(1);
-              }
-              else {
-                ival = ival + 1;
-              }
-              collectedTuples.put(e.getKey(), ival);
-              collectedTupleValues.put(e.getKey(), e.getValue());
-            }
+    /**
+     *
+     * @param payload
+     */
+    @SuppressWarnings("unchecked")
+    @Override
+    public void put(Object payload)
+    {
+      count++;
+      if (dohash) {
+        HashMap<String, Double> tuple = (HashMap<String, Double>)payload;
+        for (Map.Entry<String, Double> e : tuple.entrySet()) {
+          Integer ival = collectedTuples.get(e.getKey());
+          if (ival == null) {
+            ival = 1;
+          } else {
+            ival = ival + 1;
           }
+          collectedTuples.put(e.getKey(), ival);
+          collectedTupleValues.put(e.getKey(), e.getValue());
         }
-        /**
-         *
-         */
-        public void clear() {
-            collectedTuples.clear();
-            collectedTupleValues.clear();
-            count = 0;
-        }
+      }
+    }
+
+    /**
+     *
+     */
+    public void clear()
+    {
+      collectedTuples.clear();
+      collectedTupleValues.clear();
+      count = 0;
+    }
 
     @Override
     public int getCount(boolean reset)
     {
       throw new UnsupportedOperationException("Not supported yet.");
     }
-    }
+  }
 
-    /**
-     * Test node logic emits correct results
-     */
-    @SuppressWarnings("unchecked")
-    @Test
-    public void testNodeProcessing() throws Exception
-    {
+  /**
+   * Test node logic emits correct results
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void testNodeProcessing() throws Exception
+  {
 
-      EventClassifier node = new EventClassifier();
-      TestSink classifySink = new TestSink();
-      classifySink.dohash = true;
-      node.data.setSink(classifySink);
+    EventClassifier node = new EventClassifier();
+    TestSink classifySink = new TestSink();
+    classifySink.dohash = true;
+    node.data.setSink(classifySink);
 
-      HashMap<String, Double> keymap = new HashMap<String, Double>();
-      keymap.put("a", 1.0);
-      keymap.put("b", 4.0);
-      keymap.put("c", 5.0);
-      node.setKeyMap(keymap);
-      node.setOperationReplace();
+    HashMap<String, Double> keymap = new HashMap<String, Double>();
+    keymap.put("a", 1.0);
+    keymap.put("b", 4.0);
+    keymap.put("c", 5.0);
+    node.setKeyMap(keymap);
+    node.setOperationReplace();
 
-      int numTuples = 1000;
+    int numTuples = 1000;
 
-      HashMap<String, ArrayList<Integer>> wmap = new HashMap<String, ArrayList<Integer>>();
-      ArrayList<Integer> list = new ArrayList<Integer>(3);
-      list.add(60);
-      list.add(10);
-      list.add(35);
-      wmap.put("ia", list);
-      list = new ArrayList<Integer>(3);
-      list.add(10);
-      list.add(75);
-      list.add(15);
-      wmap.put("ib", list);
-      list = new ArrayList<Integer>(3);
-      list.add(20);
-      list.add(10);
-      list.add(70);
-      wmap.put("ic", list);
-      list = new ArrayList<Integer>(3);
-      list.add(50);
-      list.add(15);
-      list.add(35);
-      wmap.put("id", list);
-      node.setKeyWeights(wmap);
-      node.setup(null);
+    HashMap<String, ArrayList<Integer>> wmap = new HashMap<String, ArrayList<Integer>>();
+    ArrayList<Integer> list = new ArrayList<Integer>(3);
+    list.add(60);
+    list.add(10);
+    list.add(35);
+    wmap.put("ia", list);
+    list = new ArrayList<Integer>(3);
+    list.add(10);
+    list.add(75);
+    list.add(15);
+    wmap.put("ib", list);
+    list = new ArrayList<Integer>(3);
+    list.add(20);
+    list.add(10);
+    list.add(70);
+    wmap.put("ic", list);
+    list = new ArrayList<Integer>(3);
+    list.add(50);
+    list.add(15);
+    list.add(35);
+    wmap.put("id", list);
+    node.setKeyWeights(wmap);
+    node.setup(null);
 
-      HashMap<String, Double> input = new HashMap<String, Double>();
-      int sentval = 0;
-      for (int i = 0; i < numTuples; i++) {
-        input.clear();
-        input.put("ia", 2.0);
-        input.put("ib", 20.0);
-        input.put("ic", 1000.0);
-        input.put("id", 1000.0);
-        sentval += 4;
-        node.event.process(input);
-      }
-      node.endWindow();
-      int ival = 0;
-      if (classifySink.dohash) {
-        for (Map.Entry<String, Integer> e: classifySink.collectedTuples.entrySet()) {
-          ival += e.getValue().intValue();
-        }
-      }
-      else {
-        ival = classifySink.count;
+    HashMap<String, Double> input = new HashMap<String, Double>();
+    int sentval = 0;
+    for (int i = 0; i < numTuples; i++) {
+      input.clear();
+      input.put("ia", 2.0);
+      input.put("ib", 20.0);
+      input.put("ic", 1000.0);
+      input.put("id", 1000.0);
+      sentval += 4;
+      node.event.process(input);
+    }
+    node.endWindow();
+    int ival = 0;
+    if (classifySink.dohash) {
+      for (Map.Entry<String, Integer> e : classifySink.collectedTuples.entrySet()) {
+        ival += e.getValue().intValue();
       }
+    } else {
+      ival = classifySink.count;
+    }
 
-      LOG.info(String.format("\nThe number of keys in %d tuples are %d and %d",
-                             ival,
-                             classifySink.collectedTuples.size(),
-                             classifySink.collectedTupleValues.size()));
-      for (Map.Entry<String, Double> ve: classifySink.collectedTupleValues.entrySet()) {
-        Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
-        LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval.intValue(), ve.getKey(), ve.getValue()));
-      }
-      Assert.assertEquals("number emitted tuples", sentval, ival);
+    LOG.info(String.format("\nThe number of keys in %d tuples are %d and %d",
+        ival,
+        classifySink.collectedTuples.size(),
+        classifySink.collectedTupleValues.size()));
+    for (Map.Entry<String, Double> ve : classifySink.collectedTupleValues.entrySet()) {
+      Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
+      LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval.intValue(), ve.getKey(), ve.getValue()));
+    }
+    Assert.assertEquals("number emitted tuples", sentval, ival);
 
-      // Now test a node with no weights
-      EventClassifier nwnode = new EventClassifier();
-      classifySink.clear();
-      nwnode.data.setSink(classifySink);
-      nwnode.setKeyMap(keymap);
-      nwnode.setOperationReplace();
-      nwnode.setup(null);
+    // Now test a node with no weights
+    EventClassifier nwnode = new EventClassifier();
+    classifySink.clear();
+    nwnode.data.setSink(classifySink);
+    nwnode.setKeyMap(keymap);
+    nwnode.setOperationReplace();
+    nwnode.setup(null);
 
-      sentval = 0;
-      for (int i = 0; i < numTuples; i++) {
-        input.clear();
-        input.put("ia", 2.0);
-        input.put("ib", 20.0);
-        input.put("ic", 1000.0);
-        input.put("id", 1000.0);
-        sentval += 4;
-        nwnode.event.process(input);
-      }
-      nwnode.endWindow();
-      ival = 0;
-      if (classifySink.dohash) {
-        for (Map.Entry<String, Integer> e: classifySink.collectedTuples.entrySet()) {
-          ival += e.getValue().intValue();
-        }
-      }
-      else {
-        ival = classifySink.count;
-      }
-      LOG.info(String.format("\nThe number of keys in %d tuples are %d and %d",
-                             ival,
-                             classifySink.collectedTuples.size(),
-                             classifySink.collectedTupleValues.size()));
-      for (Map.Entry<String, Double> ve: classifySink.collectedTupleValues.entrySet()) {
-        Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
-        LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval.intValue(), ve.getKey(), ve.getValue()));
+    sentval = 0;
+    for (int i = 0; i < numTuples; i++) {
+      input.clear();
+      input.put("ia", 2.0);
+      input.put("ib", 20.0);
+      input.put("ic", 1000.0);
+      input.put("id", 1000.0);
+      sentval += 4;
+      nwnode.event.process(input);
+    }
+    nwnode.endWindow();
+    ival = 0;
+    if (classifySink.dohash) {
+      for (Map.Entry<String, Integer> e : classifySink.collectedTuples.entrySet()) {
+        ival += e.getValue().intValue();
       }
-      Assert.assertEquals("number emitted tuples", sentval, ival);
-
+    } else {
+      ival = classifySink.count;
+    }
+    LOG.info(String.format("\nThe number of keys in %d tuples are %d and %d",
+        ival,
+        classifySink.collectedTuples.size(),
+        classifySink.collectedTupleValues.size()));
+    for (Map.Entry<String, Double> ve : classifySink.collectedTupleValues.entrySet()) {
+      Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
+      LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval.intValue(), ve.getKey(), ve.getValue()));
+    }
+    Assert.assertEquals("number emitted tuples", sentval, ival);
 
-      // Now test a node with no weights and no values
-      EventClassifier nvnode = new EventClassifier();
-      classifySink.clear();
-      keymap.put("a", 0.0);
-      keymap.put("b", 0.0);
-      keymap.put("c", 0.0);
+    // Now test a node with no weights and no values
+    EventClassifier nvnode = new EventClassifier();
+    classifySink.clear();
+    keymap.put("a", 0.0);
+    keymap.put("b", 0.0);
+    keymap.put("c", 0.0);
 
-      nvnode.data.setSink(classifySink);
-      nvnode.setKeyMap(keymap);
-      nvnode.setOperationReplace();
-      nvnode.setup(null);
+    nvnode.data.setSink(classifySink);
+    nvnode.setKeyMap(keymap);
+    nvnode.setOperationReplace();
+    nvnode.setup(null);
 
-      sentval = 0;
-      for (int i = 0; i < numTuples; i++) {
-        input.clear();
-        input.put("ia", 2.0);
-        input.put("ib", 20.0);
-        input.put("ic", 500.0);
-        input.put("id", 1000.0);
-        sentval += 4;
-        nvnode.event.process(input);
-      }
+    sentval = 0;
+    for (int i = 0; i < numTuples; i++) {
+      input.clear();
+      input.put("ia", 2.0);
+      input.put("ib", 20.0);
+      input.put("ic", 500.0);
+      input.put("id", 1000.0);
+      sentval += 4;
+      nvnode.event.process(input);
+    }
     nvnode.endWindow();
     ival = 0;
     if (classifySink.dohash) {
-      for (Map.Entry<String, Integer> e: classifySink.collectedTuples.entrySet()) {
+      for (Map.Entry<String, Integer> e : classifySink.collectedTuples.entrySet()) {
         ival += e.getValue().intValue();
       }
-    }
-    else {
+    } else {
       ival = classifySink.count;
     }
     LOG.info(String.format("\nThe number of keys in %d tuples are %d and %d",
-                           ival,
-                           classifySink.collectedTuples.size(),
-                           classifySink.collectedTupleValues.size()));
-    for (Map.Entry<String, Double> ve: classifySink.collectedTupleValues.entrySet()) {
+        ival,
+        classifySink.collectedTuples.size(),
+        classifySink.collectedTupleValues.size()));
+    for (Map.Entry<String, Double> ve : classifySink.collectedTupleValues.entrySet()) {
       Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
       LOG.info(String.format("%d tuples of key \"%s\" has value %f",
-                             ieval.intValue(),
-                             ve.getKey(),
-                             ve.getValue()));
+          ieval.intValue(),
+          ve.getKey(),
+          ve.getValue()));
     }
     Assert.assertEquals("number emitted tuples", sentval, ival);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/EventGeneratorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/EventGeneratorTest.java b/library/src/test/java/com/datatorrent/lib/testbench/EventGeneratorTest.java
index f064845..47e209c 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/EventGeneratorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/EventGeneratorTest.java
@@ -18,14 +18,16 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.common.util.BaseOperator;
-import com.datatorrent.api.DefaultInputPort;
-
 import java.util.HashMap;
 import java.util.Map;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.common.util.BaseOperator;
+
 import static org.junit.Assert.assertTrue;
 
 /**
@@ -93,26 +95,26 @@ public class EventGeneratorTest
   }
 
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	public void testSingleSchemaNodeProcessing(boolean stringschema) throws Exception
+  public void testSingleSchemaNodeProcessing(boolean stringschema) throws Exception
   {
-  	EventGenerator node = new EventGenerator();
+    EventGenerator node = new EventGenerator();
     node.setKeysHelper("a,b,c,d");
     node.setValuesHelper("");
     node.setWeightsHelper("10,40,20,30");
-  	CollectorTestSink count = new CollectorTestSink();
-  	node.count.setSink(count);
-  	CollectorTestSink data = new CollectorTestSink();
-  	node.string_data.setSink(data);
-  	CollectorTestSink hashData = new CollectorTestSink();
-  	node.hash_data.setSink(hashData);
+    CollectorTestSink count = new CollectorTestSink();
+    node.count.setSink(count);
+    CollectorTestSink data = new CollectorTestSink();
+    node.string_data.setSink(data);
+    CollectorTestSink hashData = new CollectorTestSink();
+    node.hash_data.setSink(hashData);
 
-  	node.setup(null);
-  	node.beginWindow(1);
-  	node.emitTuples();
-  	node.endWindow();
-  	node.teardown();
+    node.setup(null);
+    node.beginWindow(1);
+    node.emitTuples();
+    node.endWindow();
+    node.teardown();
 
-  	assertTrue("Default number of tuples generated", 10000 == data.collectedTuples.size());
+    assertTrue("Default number of tuples generated", 10000 == data.collectedTuples.size());
 
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/EventIncrementerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/EventIncrementerTest.java b/library/src/test/java/com/datatorrent/lib/testbench/EventIncrementerTest.java
index 1c120f3..0e9fc4d 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/EventIncrementerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/EventIncrementerTest.java
@@ -18,15 +18,16 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Sink;
-import com.datatorrent.lib.testbench.EventIncrementer;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Sink;
+
 /**
  * Functional test for {@link com.datatorrent.lib.testbench.EventIncrementer}<p>
  * <br>
@@ -139,19 +140,20 @@ public class EventIncrementerTest
     int numtuples = 1000;
     String seed1 = "a";
     ArrayList val = new ArrayList();
-    val.add(new Integer(10));
-    val.add(new Integer(20));
+    val.add(10);
+    val.add(20);
     stuple.put(seed1, val);
     for (int i = 0; i < numtuples; i++) {
       seedSink.put(stuple);
     }
     oper.endWindow();
 
-    LOG.debug(String.format("\n*************************\nEmitted %d tuples, Processed %d tuples, Received %d tuples\n******************\n",
-                            numtuples,
-                            oper.tuple_count,
-                            dataSink.count));
-    for (Map.Entry<String, String> e: dataSink.collectedTuples.entrySet()) {
+    LOG.debug(String.format(
+        "\n*************************\nEmitted %d tuples, Processed %d tuples, Received %d tuples\n******************\n",
+        numtuples,
+        oper.tuple_count,
+        dataSink.count));
+    for (Map.Entry<String, String> e : dataSink.collectedTuples.entrySet()) {
       LOG.debug(String.format("Got key (%s) and value (%s)", e.getKey(), e.getValue()));
     }
 
@@ -159,12 +161,12 @@ public class EventIncrementerTest
 
     HashMap<String, Object> ixtuple = new HashMap<String, Object>(1);
     HashMap<String, Integer> ixval = new HashMap<String, Integer>(1);
-    ixval.put("x", new Integer(10));
+    ixval.put("x", 10);
     ixtuple.put("a", ixval);
 
     HashMap<String, Object> iytuple = new HashMap<String, Object>(1);
     HashMap<String, Integer> iyval = new HashMap<String, Integer>(1);
-    iyval.put("y", new Integer(10));
+    iyval.put("y", 10);
     iytuple.put("a", iyval);
 
     for (int i = 0; i < numtuples; i++) {
@@ -174,11 +176,12 @@ public class EventIncrementerTest
 
     oper.endWindow();
 
-    LOG.debug(String.format("\n*************************\nEmitted %d tuples, Processed %d tuples, Received %d tuples\n******************\n",
-                            numtuples*2,
-                            oper.tuple_count,
-                            countSink.count));
-     for (Map.Entry<String, String> e: dataSink.collectedTuples.entrySet()) {
+    LOG.debug(String.format(
+        "\n*************************\nEmitted %d tuples, Processed %d tuples, Received %d tuples\n******************\n",
+        numtuples * 2,
+        oper.tuple_count,
+        countSink.count));
+    for (Map.Entry<String, String> e : dataSink.collectedTuples.entrySet()) {
       LOG.debug(String.format("Got key (%s) and value (%s)", e.getKey(), e.getValue()));
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/FilteredEventClassifierTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/FilteredEventClassifierTest.java b/library/src/test/java/com/datatorrent/lib/testbench/FilteredEventClassifierTest.java
index 745c515..57c8829 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/FilteredEventClassifierTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/FilteredEventClassifierTest.java
@@ -18,15 +18,16 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Sink;
-import com.datatorrent.lib.testbench.FilteredEventClassifier;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Sink;
+
 
 /**
  * Functional test for {@link com.datatorrent.lib.testbench.FilteredEventClassifier} for three configuration><p>
@@ -61,9 +62,8 @@ public class FilteredEventClassifierTest
       for (Map.Entry<String, Double> e : tuple.entrySet()) {
         Integer ival = collectedTuples.get(e.getKey());
         if (ival == null) {
-          ival = new Integer(1);
-        }
-        else {
+          ival = 1;
+        } else {
           ival = ival + 1;
         }
         collectedTuples.put(e.getKey(), ival);
@@ -162,14 +162,15 @@ public class FilteredEventClassifierTest
       ival += e.getValue().intValue();
     }
 
-    LOG.info(String.format("\n*******************************************************\nFiltered %d out of %d intuples with %d and %d unique keys",
-                           ival,
-                           sentval,
-                           classifySink.collectedTuples.size(),
-                           classifySink.collectedTupleValues.size()));
-    for (Map.Entry<String, Double> ve: classifySink.collectedTupleValues.entrySet()) {
+    LOG.info(String.format(
+        "\n*******************************************************\nFiltered %d out of %d intuples with %d and %d " + "unique keys",
+        ival,
+        sentval,
+        classifySink.collectedTuples.size(),
+        classifySink.collectedTupleValues.size()));
+    for (Map.Entry<String, Double> ve : classifySink.collectedTupleValues.entrySet()) {
       Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
-      LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval.intValue(), ve.getKey(), ve.getValue()));
+      LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval, ve.getKey(), ve.getValue()));
     }
 
     // Now test a node with no weights
@@ -207,12 +208,13 @@ public class FilteredEventClassifierTest
       ival += e.getValue().intValue();
     }
 
-    LOG.info(String.format("\n*******************************************************\nFiltered %d out of %d intuples with %d and %d unique keys",
-                           ival,
-                           sentval,
-                           classifySink.collectedTuples.size(),
-                           classifySink.collectedTupleValues.size()));
-    for (Map.Entry<String, Double> ve: classifySink.collectedTupleValues.entrySet()) {
+    LOG.info(String.format(
+        "\n*******************************************************\nFiltered %d out of %d intuples with %d and %d " + "unique keys",
+        ival,
+        sentval,
+        classifySink.collectedTuples.size(),
+        classifySink.collectedTupleValues.size()));
+    for (Map.Entry<String, Double> ve : classifySink.collectedTupleValues.entrySet()) {
       Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
       LOG.info(String.format("%d tuples of key \"%s\" has value %f", ieval.intValue(), ve.getKey(), ve.getValue()));
     }
@@ -252,21 +254,22 @@ public class FilteredEventClassifierTest
     }
     nvnode.endWindow();
     ival = 0;
-    for (Map.Entry<String, Integer> e: classifySink.collectedTuples.entrySet()) {
-      ival += e.getValue().intValue();
+    for (Map.Entry<String, Integer> e : classifySink.collectedTuples.entrySet()) {
+      ival += e.getValue();
     }
-    LOG.info(String.format("\n*******************************************************\nFiltered %d out of %d intuples with %d and %d unique keys",
-                           ival,
-                           sentval,
-                           classifySink.collectedTuples.size(),
-                           classifySink.collectedTupleValues.size()));
+    LOG.info(String.format(
+        "\n*******************************************************\nFiltered %d out of %d intuples with %d and %d " + "unique keys",
+        ival,
+        sentval,
+        classifySink.collectedTuples.size(),
+        classifySink.collectedTupleValues.size()));
 
-    for (Map.Entry<String, Double> ve: classifySink.collectedTupleValues.entrySet()) {
+    for (Map.Entry<String, Double> ve : classifySink.collectedTupleValues.entrySet()) {
       Integer ieval = classifySink.collectedTuples.get(ve.getKey()); // ieval should not be null?
       LOG.info(String.format("%d tuples of key \"%s\" has value %f",
-                             ieval.intValue(),
-                             ve.getKey(),
-                             ve.getValue()));
+          ieval,
+          ve.getKey(),
+          ve.getValue()));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java b/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
index 5c692c4..6876525 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
@@ -65,8 +65,8 @@ public class RandomEventGeneratorTest
     testSchemaNodeProcessing();
   }
 
-  @SuppressWarnings({ "rawtypes", "unchecked" })
-	public void testSchemaNodeProcessing() throws Exception
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public void testSchemaNodeProcessing() throws Exception
   {
     RandomEventGenerator node = new RandomEventGenerator();
     node.setMinvalue(0);
@@ -82,7 +82,7 @@ public class RandomEventGeneratorTest
     node.emitTuples();
     node.endWindow();
     node.teardown();
-    assertTrue("tuple blast" , integer_data.collectedTuples.size() == 5000);
-    assertTrue("tuple blast" , string_data.collectedTuples.size() == 5000);
+    assertTrue("tuple blast", integer_data.collectedTuples.size() == 5000);
+    assertTrue("tuple blast", string_data.collectedTuples.size() == 5000);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/SeedEventClassifierTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/SeedEventClassifierTest.java b/library/src/test/java/com/datatorrent/lib/testbench/SeedEventClassifierTest.java
index eb3c961..dd8d346 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/SeedEventClassifierTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/SeedEventClassifierTest.java
@@ -99,8 +99,7 @@ public class SeedEventClassifierTest
     Sink inSink2 = oper.data2.getSink();
     if (isstring) {
       oper.string_data.setSink(classifySink);
-    }
-    else {
+    } else {
       oper.hash_data.setSink(hashSink);
     }
 
@@ -122,12 +121,11 @@ public class SeedEventClassifierTest
           inSink2.put(input);
         }
       }
-    }
-    else {
+    } else {
       Integer input;
       for (int j = 0; j < 5; j++) {
         for (int i = 0; i < numTuples; i++) {
-          input = new Integer(i);
+          input = i;
           inSink1.put(input);
           inSink2.put(input);
         }
@@ -137,14 +135,13 @@ public class SeedEventClassifierTest
     if (isstring) {
       Assert.assertEquals("number emitted tuples", numTuples * 2 * 5, classifySink.count);
       LOG.debug(String.format("\n********************\nProcessed %d tuples with %d uniques\n********************\n",
-                              classifySink.count,
-                              classifySink.collectedTuples.size()));
-    }
-    else {
+          classifySink.count,
+          classifySink.collectedTuples.size()));
+    } else {
       Assert.assertEquals("number emitted tuples", numTuples * 2 * 5, hashSink.count);
       LOG.debug(String.format("\n********************\nProcessed %d tuples with %d uniques\n********************\n",
-                              hashSink.count,
-                              hashSink.collectedTuples.size()));
+          hashSink.count,
+          hashSink.collectedTuples.size()));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/SeedEventGeneratorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/SeedEventGeneratorTest.java b/library/src/test/java/com/datatorrent/lib/testbench/SeedEventGeneratorTest.java
index 80c75d6..cdafecf 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/SeedEventGeneratorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/SeedEventGeneratorTest.java
@@ -18,11 +18,10 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Operator.ShutdownException;
-import com.datatorrent.lib.testbench.SeedEventGenerator;
-
 import org.junit.Test;
 
+import com.datatorrent.api.Operator.ShutdownException;
+
 import static org.junit.Assert.assertTrue;
 
 /**
@@ -50,7 +49,7 @@ public class SeedEventGeneratorTest
   }
 
   @SuppressWarnings({ "rawtypes", "unchecked" })
-	public void testSchemaNodeProcessing(boolean doseedkey) throws Exception
+  public void testSchemaNodeProcessing(boolean doseedkey) throws Exception
   {
     SeedEventGenerator node = new SeedEventGenerator();
     if (doseedkey) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/testbench/ThroughputCounterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/ThroughputCounterTest.java b/library/src/test/java/com/datatorrent/lib/testbench/ThroughputCounterTest.java
index 8fb8a3e..2becda7 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/ThroughputCounterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/ThroughputCounterTest.java
@@ -18,13 +18,14 @@
  */
 package com.datatorrent.lib.testbench;
 
-import com.datatorrent.api.Sink;
-import com.datatorrent.lib.testbench.ThroughputCounter;
 import java.util.HashMap;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Sink;
+
 /**
  *
  * Functional tests for {@link com.datatorrent.lib.testbench.ThroughputCounter}. <p>
@@ -38,9 +39,10 @@ import org.slf4j.LoggerFactory;
  * DRC checks are validated<br>
  *
  */
-public class ThroughputCounterTest {
+public class ThroughputCounterTest
+{
 
-    private static Logger log = LoggerFactory.getLogger(ThroughputCounterTest.class);
+  private static Logger log = LoggerFactory.getLogger(ThroughputCounterTest.class);
 
   @SuppressWarnings("rawtypes")
   class TestCountSink implements Sink
@@ -56,8 +58,8 @@ public class ThroughputCounterTest {
     public void put(Object payload)
     {
       HashMap<String, Number> tuples = (HashMap<String, Number>)payload;
-      average = ((Long)tuples.get(ThroughputCounter.OPORT_COUNT_TUPLE_AVERAGE)).longValue();
-      count += ((Long)tuples.get(ThroughputCounter.OPORT_COUNT_TUPLE_COUNT)).longValue();
+      average = (Long)tuples.get(ThroughputCounter.OPORT_COUNT_TUPLE_AVERAGE);
+      count += (Long)tuples.get(ThroughputCounter.OPORT_COUNT_TUPLE_COUNT);
     }
 
     @Override
@@ -85,8 +87,8 @@ public class ThroughputCounterTest {
     HashMap<String, Integer> input;
     int aint = 1000;
     int bint = 100;
-    Integer aval = new Integer(aint);
-    Integer bval = new Integer(bint);
+    Integer aval = aint;
+    Integer bval = bint;
     long ntot = aint + bint;
     long numtuples = 1000;
     long sentval = 0;
@@ -99,10 +101,11 @@ public class ThroughputCounterTest {
     }
     node.endWindow();
 
-    log.info(String.format("\n*******************************************************\nGot average per sec(%d), count(got %d, expected %d), numtuples(%d)",
-                           countSink.average,
-                           countSink.count,
-                           ntot * numtuples,
-                           sentval));
+    log.info(String.format(
+        "\n*******************************************************\nGot average per sec(%d), count(got %d, expected " + "%d), numtuples(%d)",
+        countSink.average,
+        countSink.count,
+        ntot * numtuples,
+        sentval));
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/transform/TransformOperatorAppTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/transform/TransformOperatorAppTest.java b/library/src/test/java/com/datatorrent/lib/transform/TransformOperatorAppTest.java
index fc8040b..643fb04 100644
--- a/library/src/test/java/com/datatorrent/lib/transform/TransformOperatorAppTest.java
+++ b/library/src/test/java/com/datatorrent/lib/transform/TransformOperatorAppTest.java
@@ -55,7 +55,8 @@ public class TransformOperatorAppTest
 
   public static class Application implements StreamingApplication
   {
-    @Override public void populateDAG(DAG dag, Configuration configuration)
+    @Override
+    public void populateDAG(DAG dag, Configuration configuration)
     {
       DummyInputGenerator input = dag.addOperator("Input", new DummyInputGenerator());
       TransformOperator transform = dag.addOperator("Transform", new TransformOperator());
@@ -106,24 +107,29 @@ public class TransformOperatorAppTest
   {
     public final transient DefaultOutputPort<TestPojo> output = new DefaultOutputPort<>();
 
-    @Override public void emitTuples()
+    @Override
+    public void emitTuples()
     {
       output.emit(new TestPojo("FirstName", "LastName"));
     }
 
-    @Override public void beginWindow(long l)
+    @Override
+    public void beginWindow(long l)
     {
     }
 
-    @Override public void endWindow()
+    @Override
+    public void endWindow()
     {
     }
 
-    @Override public void setup(Context.OperatorContext context)
+    @Override
+    public void setup(Context.OperatorContext context)
     {
     }
 
-    @Override public void teardown()
+    @Override
+    public void teardown()
     {
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperatorTest.java b/library/src/test/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperatorTest.java
index 0b681ad..52d27b8 100644
--- a/library/src/test/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/util/DimensionTimeBucketSumOperatorTest.java
@@ -18,21 +18,22 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.lib.testbench.CollectorTestSink;
-
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.lang.mutable.MutableDouble;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.commons.lang.mutable.MutableDouble;
+
 import com.google.common.collect.Maps;
 
+import com.datatorrent.lib.testbench.CollectorTestSink;
+
 @SuppressWarnings({"rawtypes", "unchecked"})
 public class DimensionTimeBucketSumOperatorTest
 {
@@ -69,8 +70,8 @@ public class DimensionTimeBucketSumOperatorTest
     dimensionKey.add("url");
     try {
       oper.addCombination(dimensionKey);
-    }
-    catch (NoSuchFieldException e) {
+    } catch (NoSuchFieldException e) {
+      //ignored
     }
     oper.setTimeBucketFlags(AbstractDimensionTimeBucketOperator.TIMEBUCKET_MINUTE);
     oper.setup(null);

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/util/JavaScriptFilterOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/util/JavaScriptFilterOperatorTest.java b/library/src/test/java/com/datatorrent/lib/util/JavaScriptFilterOperatorTest.java
index 6afa03d..6164104 100644
--- a/library/src/test/java/com/datatorrent/lib/util/JavaScriptFilterOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/util/JavaScriptFilterOperatorTest.java
@@ -18,12 +18,14 @@
  */
 package com.datatorrent.lib.util;
 
-import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
 import java.util.HashMap;
 import java.util.Map;
+
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
+
 /**
  *
  * functional test for {@link com.datatorrent.lib.util.JavaScriptFilterOperator}.

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/util/KryoCloneUtilsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/util/KryoCloneUtilsTest.java b/library/src/test/java/com/datatorrent/lib/util/KryoCloneUtilsTest.java
index 5eaea2e..5f284a9 100644
--- a/library/src/test/java/com/datatorrent/lib/util/KryoCloneUtilsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/util/KryoCloneUtilsTest.java
@@ -68,7 +68,8 @@ public class KryoCloneUtilsTest
     assertFalse(from.transientProp.equals(to.transientProp));
   }
 
-  private TestEntity getTestEntity(int depth) {
+  private TestEntity getTestEntity(int depth)
+  {
     TestEntity returnVal = null;
     TestEntity curr = null;
     while (depth-- > 0) {
@@ -82,8 +83,8 @@ public class KryoCloneUtilsTest
     return returnVal;
   }
 
-
-  static class TestEntity {
+  static class TestEntity
+  {
 
     String strProp = RandomStringUtils.random(10);
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/util/PojoUtilsTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/util/PojoUtilsTest.java b/library/src/test/java/com/datatorrent/lib/util/PojoUtilsTest.java
index 138c731..e8553be 100644
--- a/library/src/test/java/com/datatorrent/lib/util/PojoUtilsTest.java
+++ b/library/src/test/java/com/datatorrent/lib/util/PojoUtilsTest.java
@@ -18,6 +18,26 @@
  */
 package com.datatorrent.lib.util;
 
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.datatorrent.lib.expression.Expression;
+import com.datatorrent.lib.util.PojoUtils.Getter;
+import com.datatorrent.lib.util.PojoUtils.GetterBoolean;
+import com.datatorrent.lib.util.PojoUtils.GetterByte;
+import com.datatorrent.lib.util.PojoUtils.GetterChar;
+import com.datatorrent.lib.util.PojoUtils.GetterDouble;
+import com.datatorrent.lib.util.PojoUtils.GetterFloat;
+import com.datatorrent.lib.util.PojoUtils.GetterInt;
+import com.datatorrent.lib.util.PojoUtils.GetterLong;
+import com.datatorrent.lib.util.PojoUtils.GetterShort;
+import com.datatorrent.lib.util.PojoUtils.Setter;
+import com.datatorrent.lib.util.PojoUtils.SetterBoolean;
+import com.datatorrent.lib.util.PojoUtils.SetterByte;
+import com.datatorrent.lib.util.PojoUtils.SetterInt;
+import com.datatorrent.lib.util.PojoUtils.SetterLong;
+import com.datatorrent.lib.util.PojoUtils.SetterShort;
+
 import static com.datatorrent.lib.util.PojoUtils.constructGetter;
 import static com.datatorrent.lib.util.PojoUtils.constructSetter;
 import static com.datatorrent.lib.util.PojoUtils.createExpression;
@@ -45,26 +65,6 @@ import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import org.junit.Assert;
-import org.junit.Test;
-
-import com.datatorrent.lib.expression.Expression;
-import com.datatorrent.lib.util.PojoUtils.GetterBoolean;
-import com.datatorrent.lib.util.PojoUtils.GetterByte;
-import com.datatorrent.lib.util.PojoUtils.GetterChar;
-import com.datatorrent.lib.util.PojoUtils.GetterDouble;
-import com.datatorrent.lib.util.PojoUtils.GetterFloat;
-import com.datatorrent.lib.util.PojoUtils.GetterInt;
-import com.datatorrent.lib.util.PojoUtils.GetterLong;
-import com.datatorrent.lib.util.PojoUtils.Getter;
-import com.datatorrent.lib.util.PojoUtils.GetterShort;
-import com.datatorrent.lib.util.PojoUtils.Setter;
-import com.datatorrent.lib.util.PojoUtils.SetterBoolean;
-import com.datatorrent.lib.util.PojoUtils.SetterByte;
-import com.datatorrent.lib.util.PojoUtils.SetterInt;
-import com.datatorrent.lib.util.PojoUtils.SetterLong;
-import com.datatorrent.lib.util.PojoUtils.SetterShort;
-
 
 
 public class PojoUtilsTest
@@ -288,7 +288,8 @@ public class PojoUtilsTest
     }
 
     @SuppressWarnings("unused")
-    public void setIntVal(Integer intVal) {
+    public void setIntVal(Integer intVal)
+    {
       intField = intVal;
     }
 
@@ -409,31 +410,33 @@ public class PojoUtilsTest
     assertEquals(8, testPojo.getIntVal());
 
     SetterByte<Object> setterByte = createSetterByte(testPojoClass, TestPojo.INT_FIELD_NAME);
-    setterByte.set(testPojo, (byte) 9);
+    setterByte.set(testPojo, (byte)9);
     assertEquals(9, testPojo.intField);
 
     setterByte = (SetterByte<Object>)constructSetter(testPojoClass, TestPojo.INT_FIELD_NAME, byte.class);
-    setterByte.set(testPojo, (byte) 10);
+    setterByte.set(testPojo, (byte)10);
     assertEquals(10, testPojo.intField);
 
     setterByte = createSetterByte(testPojoClass, TestPojo.INT_METHOD_NAME);
-    setterByte.set(testPojo, (byte) 11);
+    setterByte.set(testPojo, (byte)11);
     assertEquals(11, testPojo.getIntVal());
 
     setterByte = ((SetterByte<Object>)constructSetter(testPojoClass, TestPojo.INT_METHOD_NAME, byte.class));
-    setterByte.set(testPojo, (byte) 12);
+    setterByte.set(testPojo, (byte)12);
     assertEquals(12, testPojo.getIntVal());
 
-    createSetter(testPojoClass, TestPojo.INT_FIELD_NAME, Byte.class).set(testPojo, Byte.valueOf((byte) 13));
+    createSetter(testPojoClass, TestPojo.INT_FIELD_NAME, Byte.class).set(testPojo, Byte.valueOf((byte)13));
     assertEquals(13, testPojo.intField);
 
-    ((Setter<Object, Byte>)constructSetter(testPojoClass, TestPojo.INT_FIELD_NAME, Byte.class)).set(testPojo, Byte.valueOf((byte) 14));
+    ((Setter<Object, Byte>)constructSetter(testPojoClass, TestPojo.INT_FIELD_NAME, Byte.class)).set(testPojo,
+        Byte.valueOf((byte)14));
     assertEquals(14, testPojo.getIntVal());
 
-    createSetter(testPojoClass, TestPojo.INT_METHOD_NAME, Byte.class).set(testPojo, Byte.valueOf((byte) 15));
+    createSetter(testPojoClass, TestPojo.INT_METHOD_NAME, Byte.class).set(testPojo, Byte.valueOf((byte)15));
     assertEquals(15, testPojo.getIntVal());
 
-    ((Setter<Object, Byte>)constructSetter(testPojoClass, TestPojo.INT_METHOD_NAME, Byte.class)).set(testPojo, Byte.valueOf((byte) 16));
+    ((Setter<Object, Byte>)constructSetter(testPojoClass, TestPojo.INT_METHOD_NAME, Byte.class)).set(testPojo,
+        Byte.valueOf((byte)16));
     assertEquals(16, testPojo.getIntVal());
 
     SetterShort<Object> setterShort = createSetterShort(testPojoClass, TestPojo.INT_FIELD_NAME);
@@ -448,8 +451,8 @@ public class PojoUtilsTest
       @SuppressWarnings("unused")
       SetterLong<Object> setterLong = createSetterLong(testPojoClass, TestPojo.INT_FIELD_NAME);
       fail("long can't be assigned to the int field");
-    }
-    catch (Exception ignored) {
+    } catch (Exception ignored) {
+      //ignored
     }
 
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/util/TestUtils.java b/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
index 55351fc..f3b2140 100644
--- a/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
+++ b/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
@@ -58,11 +58,11 @@ public class TestUtils
     FileUtils.deleteQuietly(new File("target/" + description.getClassName()));
   }
   
-  @SuppressWarnings({ "unchecked", "rawtypes" })
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public static <S extends Sink, T> S setSink(OutputPort<T> port, S sink)
   {
-     port.setSink(sink);
-     return sink;
+    port.setSink(sink);
+    return sink;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/apex/malhar/lib/dimensions/CustomTimeBucketRegistryTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/dimensions/CustomTimeBucketRegistryTest.java b/library/src/test/java/org/apache/apex/malhar/lib/dimensions/CustomTimeBucketRegistryTest.java
index c9524b1..c448b72 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/dimensions/CustomTimeBucketRegistryTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/dimensions/CustomTimeBucketRegistryTest.java
@@ -21,8 +21,6 @@ package org.apache.apex.malhar.lib.dimensions;
 import org.junit.Assert;
 import org.junit.Test;
 
-import org.apache.apex.malhar.lib.dimensions.CustomTimeBucketRegistry;
-
 import com.datatorrent.lib.appdata.schemas.CustomTimeBucket;
 import com.datatorrent.lib.appdata.schemas.TimeBucket;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/apex/malhar/lib/dimensions/DimensionsDescriptorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/dimensions/DimensionsDescriptorTest.java b/library/src/test/java/org/apache/apex/malhar/lib/dimensions/DimensionsDescriptorTest.java
index 3101577..1514cd5 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/dimensions/DimensionsDescriptorTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/dimensions/DimensionsDescriptorTest.java
@@ -24,8 +24,6 @@ import java.util.concurrent.TimeUnit;
 import org.junit.Assert;
 import org.junit.Test;
 
-import org.apache.apex.malhar.lib.dimensions.DimensionsDescriptor;
-
 import com.google.common.collect.Sets;
 
 import com.datatorrent.lib.appdata.schemas.CustomTimeBucket;
@@ -60,9 +58,8 @@ public class DimensionsDescriptorTest
   @Test
   public void simpleTest2()
   {
-    DimensionsDescriptor ad = new DimensionsDescriptor(KEY_1_NAME +
-                                                       DimensionsDescriptor.DELIMETER_SEPERATOR +
-                                                       KEY_2_NAME);
+    DimensionsDescriptor ad = new DimensionsDescriptor(KEY_1_NAME + DimensionsDescriptor.DELIMETER_SEPERATOR +
+        KEY_2_NAME);
 
     Set<String> fields = Sets.newHashSet();
     fields.add(KEY_1_NAME);
@@ -75,11 +72,8 @@ public class DimensionsDescriptorTest
   @Test
   public void simpleTimeTest()
   {
-    DimensionsDescriptor ad = new DimensionsDescriptor(KEY_1_NAME +
-                                                       DimensionsDescriptor.DELIMETER_SEPERATOR +
-                                                       DimensionsDescriptor.DIMENSION_TIME +
-                                                       DimensionsDescriptor.DELIMETER_EQUALS +
-                                                       "DAYS");
+    DimensionsDescriptor ad = new DimensionsDescriptor(KEY_1_NAME + DimensionsDescriptor.DELIMETER_SEPERATOR +
+        DimensionsDescriptor.DIMENSION_TIME + DimensionsDescriptor.DELIMETER_EQUALS + "DAYS");
 
     Set<String> fields = Sets.newHashSet();
     fields.add(KEY_1_NAME);
@@ -92,10 +86,10 @@ public class DimensionsDescriptorTest
   public void equalsAndHashCodeTest()
   {
     DimensionsDescriptor ddA = new DimensionsDescriptor(new CustomTimeBucket(TimeBucket.MINUTE, 5L),
-                                                        new Fields(Sets.newHashSet("a", "b")));
+        new Fields(Sets.newHashSet("a", "b")));
 
     DimensionsDescriptor ddB = new DimensionsDescriptor(new CustomTimeBucket(TimeBucket.MINUTE, 5L),
-                                                        new Fields(Sets.newHashSet("a", "b")));
+        new Fields(Sets.newHashSet("a", "b")));
 
     Assert.assertTrue(ddB.equals(ddA));
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/test/java/org/apache/apex/malhar/lib/state/managed/ManagedStateTestUtils.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/state/managed/ManagedStateTestUtils.java b/library/src/test/java/org/apache/apex/malhar/lib/state/managed/ManagedStateTestUtils.java
index 4ba8905..d3564ba 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/state/managed/ManagedStateTestUtils.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/state/managed/ManagedStateTestUtils.java
@@ -18,7 +18,6 @@
  */
 package org.apache.apex.malhar.lib.state.managed;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 import java.util.TreeMap;
@@ -26,11 +25,9 @@ import java.util.TreeMap;
 import javax.annotation.Nullable;
 
 import org.junit.Assert;
-import org.junit.runner.Description;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.RemoteIterator;
 



[20/22] incubator-apex-malhar git commit: APEXMALHAR-2095 removed checkstyle violations of malhar library module

Posted by th...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOMutable.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOMutable.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOMutable.java
index 274fbf2..3609f2e 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOMutable.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOMutable.java
@@ -78,70 +78,70 @@ public class GPOMutable implements Serializable
 
     {
       boolean[] oldFieldsBoolean = gpo.getFieldsBoolean();
-      if(oldFieldsBoolean != null) {
+      if (oldFieldsBoolean != null) {
         System.arraycopy(oldFieldsBoolean, 0, fieldsBoolean, 0, fieldsBoolean.length);
       }
     }
 
     {
       char[] oldFieldsChar = gpo.getFieldsCharacter();
-      if(oldFieldsChar != null) {
+      if (oldFieldsChar != null) {
         System.arraycopy(oldFieldsChar, 0, fieldsCharacter, 0, fieldsCharacter.length);
       }
     }
 
     {
       byte[] oldFieldsByte = gpo.getFieldsByte();
-      if(oldFieldsByte != null) {
+      if (oldFieldsByte != null) {
         System.arraycopy(oldFieldsByte, 0, fieldsByte, 0, fieldsByte.length);
       }
     }
 
     {
       short[] oldFieldsShort = gpo.getFieldsShort();
-      if(oldFieldsShort != null) {
+      if (oldFieldsShort != null) {
         System.arraycopy(oldFieldsShort, 0, fieldsShort, 0, fieldsShort.length);
       }
     }
 
     {
       int[] oldFieldsInteger = gpo.getFieldsInteger();
-      if(oldFieldsInteger != null) {
+      if (oldFieldsInteger != null) {
         System.arraycopy(oldFieldsInteger, 0, fieldsInteger, 0, fieldsInteger.length);
       }
     }
 
     {
       long[] oldFieldsLong = gpo.getFieldsLong();
-      if(oldFieldsLong != null) {
+      if (oldFieldsLong != null) {
         System.arraycopy(oldFieldsLong, 0, fieldsLong, 0, fieldsLong.length);
       }
     }
 
     {
       float[] oldFieldsFloat = gpo.getFieldsFloat();
-      if(oldFieldsFloat != null) {
+      if (oldFieldsFloat != null) {
         System.arraycopy(oldFieldsFloat, 0, fieldsFloat, 0, fieldsFloat.length);
       }
     }
 
     {
       double[] oldFieldsDouble = gpo.getFieldsDouble();
-      if(oldFieldsDouble != null) {
+      if (oldFieldsDouble != null) {
         System.arraycopy(oldFieldsDouble, 0, fieldsDouble, 0, fieldsDouble.length);
       }
     }
 
     {
       String[] oldFieldsString = gpo.getFieldsString();
-      if(oldFieldsString != null) {
+      if (oldFieldsString != null) {
         System.arraycopy(oldFieldsString, 0, fieldsString, 0, fieldsString.length);
       }
     }
 
     {
       Object[] oldFieldsObject = gpo.getFieldsObject();
-      if(oldFieldsObject != null) {
+      if (oldFieldsObject != null) {
         System.arraycopy(oldFieldsObject, 0, fieldsObject, 0, fieldsObject.length);
       }
     }
@@ -152,14 +152,13 @@ public class GPOMutable implements Serializable
    * @param gpo The {@link GPOMutable} object to copy fields from.
    * @param subsetFields The fields to copy from the given GPOMutable object.
    */
-  public GPOMutable(GPOMutable gpo,
-                    Fields subsetFields)
+  public GPOMutable(GPOMutable gpo, Fields subsetFields)
   {
     this(gpo.getFieldDescriptor().getSubset(subsetFields));
 
     initialize();
 
-    for(String field: this.getFieldDescriptor().getFields().getFields()) {
+    for (String field : this.getFieldDescriptor().getFields().getFields()) {
       this.setFieldGeneric(field, gpo.getField(field));
     }
   }
@@ -182,9 +181,9 @@ public class GPOMutable implements Serializable
    */
   private void initialize()
   {
-    for(Type type: fieldDescriptor.getTypeToFields().keySet()) {
+    for (Type type : fieldDescriptor.getTypeToFields().keySet()) {
       int size = fieldDescriptor.getTypeToSize().get(type);
-      switch(type) {
+      switch (type) {
         case BOOLEAN: {
           fieldsBoolean = new boolean[size];
           break;
@@ -348,47 +347,47 @@ public class GPOMutable implements Serializable
   {
     Type type = fieldDescriptor.getType(field);
 
-    if(type == null) {
+    if (type == null) {
       throw new IllegalArgumentException(field + " is not a valid field of this object.");
     }
 
     int index = fieldDescriptor.getTypeToFieldToIndex().get(type).get(field);
 
-    switch(type) {
+    switch (type) {
       case BOOLEAN: {
-        fieldsBoolean[index] = (Boolean) val;
+        fieldsBoolean[index] = (Boolean)val;
         break;
       }
       case CHAR: {
-        fieldsCharacter[index] = (Character) val;
+        fieldsCharacter[index] = (Character)val;
         break;
       }
       case STRING: {
-        fieldsString[index] = (String) val;
+        fieldsString[index] = (String)val;
         break;
       }
       case BYTE: {
-        fieldsByte[index] = (Byte) val;
+        fieldsByte[index] = (Byte)val;
         break;
       }
       case SHORT: {
-        fieldsShort[index] = (Short) val;
+        fieldsShort[index] = (Short)val;
         break;
       }
       case INTEGER: {
-        fieldsInteger[index] = (Integer) val;
+        fieldsInteger[index] = (Integer)val;
         break;
       }
       case LONG: {
-        fieldsLong[index] = (Long) val;
+        fieldsLong[index] = (Long)val;
         break;
       }
       case FLOAT: {
-        fieldsFloat[index] = (Float) val;
+        fieldsFloat[index] = (Float)val;
         break;
       }
       case DOUBLE: {
-        fieldsDouble[index] = (Double) val;
+        fieldsDouble[index] = (Double)val;
         break;
       }
       case OBJECT: {
@@ -409,13 +408,13 @@ public class GPOMutable implements Serializable
   {
     Type type = fieldDescriptor.getType(field);
 
-    if(type == null) {
+    if (type == null) {
       throw new IllegalArgumentException(field + " is not a valid field of this object.");
     }
 
     int index = fieldDescriptor.getTypeToFieldToIndex().get(type).get(field);
 
-    switch(type) {
+    switch (type) {
       case BOOLEAN: {
         return fieldsBoolean[index];
       }
@@ -677,15 +676,14 @@ public class GPOMutable implements Serializable
   private void throwInvalidField(String field, Type type)
   {
     Type fieldType = fieldDescriptor.getType(field);
-    if(fieldType == null || !fieldType.equals(type)) {
-      throw new IllegalArgumentException(field + " is not a valid field of type " +
-                                         type + " on this object.");
+    if (fieldType == null || !fieldType.equals(type)) {
+      throw new IllegalArgumentException(field + " is not a valid field of type " + type + " on this object.");
     }
   }
 
   public void applyObjectPayloadFix()
   {
-    if(fieldDescriptor.getSerdePayloadFix() != null) {
+    if (fieldDescriptor.getSerdePayloadFix() != null) {
       fieldDescriptor.getSerdePayloadFix().fix(fieldsObject);
     }
   }
@@ -710,44 +708,45 @@ public class GPOMutable implements Serializable
   @Override
   public boolean equals(Object obj)
   {
-    if(obj == null) {
+    if (obj == null) {
       return false;
     }
-    if(!(obj instanceof GPOMutable)) {
+    if (!(obj instanceof GPOMutable)) {
       return false;
     }
     final GPOMutable other = (GPOMutable)obj;
-    if(!Arrays.equals(this.fieldsBoolean, other.fieldsBoolean)) {
+    if (!Arrays.equals(this.fieldsBoolean, other.fieldsBoolean)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsCharacter, other.fieldsCharacter)) {
+    if (!Arrays.equals(this.fieldsCharacter, other.fieldsCharacter)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsByte, other.fieldsByte)) {
+    if (!Arrays.equals(this.fieldsByte, other.fieldsByte)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsShort, other.fieldsShort)) {
+    if (!Arrays.equals(this.fieldsShort, other.fieldsShort)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsInteger, other.fieldsInteger)) {
+    if (!Arrays.equals(this.fieldsInteger, other.fieldsInteger)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsLong, other.fieldsLong)) {
+    if (!Arrays.equals(this.fieldsLong, other.fieldsLong)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsFloat, other.fieldsFloat)) {
+    if (!Arrays.equals(this.fieldsFloat, other.fieldsFloat)) {
       return false;
     }
-    if(!Arrays.equals(this.fieldsDouble, other.fieldsDouble)) {
+    if (!Arrays.equals(this.fieldsDouble, other.fieldsDouble)) {
       return false;
     }
-    if(!Arrays.deepEquals(this.fieldsString, other.fieldsString)) {
+    if (!Arrays.deepEquals(this.fieldsString, other.fieldsString)) {
       return false;
     }
-    if(!Arrays.deepEquals(this.fieldsObject, other.fieldsObject)) {
+    if (!Arrays.deepEquals(this.fieldsObject, other.fieldsObject)) {
       return false;
     }
-    if(this.fieldDescriptor != other.fieldDescriptor && (this.fieldDescriptor == null || !this.fieldDescriptor.equals(other.fieldDescriptor))) {
+    if (this.fieldDescriptor != other.fieldDescriptor && (this.fieldDescriptor == null
+        || !this.fieldDescriptor.equals(other.fieldDescriptor))) {
       return false;
     }
     return true;
@@ -756,6 +755,11 @@ public class GPOMutable implements Serializable
   @Override
   public String toString()
   {
-    return "GPOMutable{" + "fieldsBoolean=" + Arrays.toString(fieldsBoolean) + ", fieldsCharacter=" + Arrays.toString(fieldsCharacter) + ", fieldsByte=" + Arrays.toString(fieldsByte) + ", fieldsShort=" + Arrays.toString(fieldsShort) + ", fieldsInteger=" + Arrays.toString(fieldsInteger) + ", fieldsLong=" + Arrays.toString(fieldsLong) + ", fieldsFloat=" + Arrays.toString(fieldsFloat) + ", fieldsDouble=" + Arrays.toString(fieldsDouble) + ", fieldsString=" + Arrays.toString(fieldsString) + ", fieldDescriptor=" + fieldDescriptor + '}';
+    return "GPOMutable{" + "fieldsBoolean=" + Arrays.toString(fieldsBoolean) + ", fieldsCharacter="
+        + Arrays.toString(fieldsCharacter) + ", fieldsByte=" + Arrays.toString(fieldsByte) + ", fieldsShort="
+        + Arrays.toString(fieldsShort) + ", fieldsInteger=" + Arrays.toString(fieldsInteger) + ", fieldsLong="
+        + Arrays.toString(fieldsLong) + ", fieldsFloat=" + Arrays.toString(fieldsFloat) + ", fieldsDouble="
+        + Arrays.toString(fieldsDouble) + ", fieldsString=" + Arrays.toString(fieldsString) + ", fieldDescriptor="
+        + fieldDescriptor + '}';
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-malhar/blob/3735316e/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOType.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOType.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOType.java
index 28ff03c..2e202b4 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOType.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/GPOType.java
@@ -34,7 +34,27 @@ import com.datatorrent.lib.util.PojoUtils.GetterBoolean;
 import com.datatorrent.lib.util.PojoUtils.GetterByte;
 import com.datatorrent.lib.util.PojoUtils.GetterChar;
 
-import static com.datatorrent.lib.appdata.gpo.GPOUtils.*;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.createGetters;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.createGettersObject;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.createGettersString;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeBoolean;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeByte;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeChar;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeDouble;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeFloat;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeInt;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeLong;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeShort;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.deserializeString;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeBoolean;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeByte;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeChar;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeDouble;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeFloat;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeInt;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeLong;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeShort;
+import static com.datatorrent.lib.appdata.gpo.GPOUtils.serializeString;
 
 /**
  * This is a helper class that reduces the need for switch statements in may utility method in {@link GPOUtils}.
@@ -44,17 +64,14 @@ abstract class GPOType
 {
   public static final GPOType[] GPO_TYPE_ARRAY;
 
-  static
-  {
+  static {
     GPO_TYPE_ARRAY = new GPOType[Type.values().length];
     Type[] types = Type.values();
 
-    for(int index = 0;
-        index < types.length;
-        index++) {
+    for (int index = 0; index < types.length; index++) {
       Type type = types[index];
 
-      switch(type) {
+      switch (type) {
         case BOOLEAN: {
           GPO_TYPE_ARRAY[index] = new BooleanT();
           break;
@@ -102,11 +119,19 @@ abstract class GPOType
   }
 
   public abstract void setFieldFromJSON(GPOMutable gpo, String field, JSONArray jo, int index);
-  public abstract void serializeJSONObject(JSONObject jo, GPOMutable gpo, String field, ResultFormatter resultFormatter) throws JSONException;
+
+  public abstract void serializeJSONObject(JSONObject jo, GPOMutable gpo, String field, ResultFormatter resultFormatter)
+      throws JSONException;
+
   public abstract void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset);
+
   public abstract void deserialize(GPOMutable gpo, String Field, byte[] serializedGPO, MutableInt offset);
-  public abstract void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter, Class<?> clazz);
+
+  public abstract void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter,
+      Class<?> clazz);
+
   public abstract byte[] serialize(Object object);
+
   public abstract Object deserialize(byte[] object, MutableInt offset);
 
   public static class BooleanT extends GPOType
@@ -120,30 +145,28 @@ abstract class GPOType
     @Override
     public void setFieldFromJSON(GPOMutable gpo, String field, JSONArray jo, int index)
     {
-        Boolean val;
+      Boolean val;
 
-        try {
-          val = jo.getBoolean(index);
-        }
-        catch(JSONException ex) {
-          throw new IllegalArgumentException("The key " + field + " does not have a valid bool value.", ex);
-        }
+      try {
+        val = jo.getBoolean(index);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid bool value.", ex);
+      }
 
-        gpo.setFieldGeneric(field, val);
+      gpo.setFieldGeneric(field, val);
     }
 
     @Override
-    public void serializeJSONObject(JSONObject jo, GPOMutable gpo, String field, ResultFormatter resultFormatter) throws JSONException
+    public void serializeJSONObject(JSONObject jo, GPOMutable gpo, String field, ResultFormatter resultFormatter)
+        throws JSONException
     {
-        jo.put(field, gpo.getFieldBool(field));
+      jo.put(field, gpo.getFieldBool(field));
     }
 
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-        serializeBoolean(gpo.getFieldBool(field),
-                         sbytes,
-                         offset);
+      serializeBoolean(gpo.getFieldBool(field), sbytes, offset);
     }
 
     @Override
@@ -156,17 +179,13 @@ abstract class GPOType
     @Override
     public void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter, Class<?> clazz)
     {
-      gpoGetters.gettersBoolean = createGetters(fields,
-                                                fieldToGetter,
-                                                clazz,
-                                                boolean.class,
-                                                GetterBoolean.class);
+      gpoGetters.gettersBoolean = createGetters(fields, fieldToGetter, clazz, boolean.class, GetterBoolean.class);
     }
 
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeBoolean((Boolean) object);
+      return GPOUtils.serializeBoolean((Boolean)object);
     }
 
     @Override
@@ -191,12 +210,8 @@ abstract class GPOType
 
       try {
         val = jo.getLong(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid long value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid long value.", ex);
       }
 
       gpo.setField(field, val);
@@ -211,9 +226,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeChar(gpo.getFieldChar(field),
-                    sbytes,
-                    offset);
+      serializeChar(gpo.getFieldChar(field), sbytes, offset);
     }
 
     @Override
@@ -226,17 +239,13 @@ abstract class GPOType
     @Override
     public void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter, Class<?> clazz)
     {
-      gpoGetters.gettersChar = createGetters(fields,
-                                             fieldToGetter,
-                                             clazz,
-                                             char.class,
-                                             GetterChar.class);
+      gpoGetters.gettersChar = createGetters(fields, fieldToGetter, clazz, char.class, GetterChar.class);
     }
 
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeChar((Character) object);
+      return GPOUtils.serializeChar((Character)object);
     }
 
     @Override
@@ -261,12 +270,8 @@ abstract class GPOType
 
       try {
         val = jo.getString(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid string value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid string value.", ex);
       }
 
       gpo.setField(field, val);
@@ -281,9 +286,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeString(gpo.getFieldString(field),
-                      sbytes,
-                      offset);
+      serializeString(gpo.getFieldString(field), sbytes, offset);
     }
 
     @Override
@@ -296,15 +299,13 @@ abstract class GPOType
     @Override
     public void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter, Class<?> clazz)
     {
-      gpoGetters.gettersString = createGettersString(fields,
-                                                     fieldToGetter,
-                                                     clazz);
+      gpoGetters.gettersString = createGettersString(fields, fieldToGetter, clazz);
     }
 
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeString((String) object);
+      return GPOUtils.serializeString((String)object);
     }
 
     @Override
@@ -382,27 +383,18 @@ abstract class GPOType
 
       try {
         val = jo.getInt(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid byte value.", ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid byte value.", ex);
       }
 
-      if(val < (int)Byte.MIN_VALUE) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " has a value "
-                                           + val
-                                           + " which is too small to fit into a byte.");
+      if (val < (int)Byte.MIN_VALUE) {
+        throw new IllegalArgumentException("The key " + field + " has a value " + val
+            + " which is too small to fit into a byte.");
       }
 
-      if(val > (int)Byte.MAX_VALUE) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " has a value "
-                                           + val
-                                           + " which is too larg to fit into a byte.");
+      if (val > (int)Byte.MAX_VALUE) {
+        throw new IllegalArgumentException("The key " + field + " has a value " + val
+            + " which is too large to fit into a byte.");
       }
 
       gpo.setField(field, (byte)val);
@@ -417,9 +409,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeByte(gpo.getFieldByte(field),
-                    sbytes,
-                    offset);
+      serializeByte(gpo.getFieldByte(field), sbytes, offset);
     }
 
     @Override
@@ -442,7 +432,7 @@ abstract class GPOType
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeByte((Byte) object);
+      return GPOUtils.serializeByte((Byte)object);
     }
 
     @Override
@@ -467,28 +457,18 @@ abstract class GPOType
 
       try {
         val = jo.getInt(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid short value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid short value.", ex);
       }
 
-      if(val < (int)Short.MIN_VALUE) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " has a value "
-                                           + val
-                                           + " which is too small to fit into a short.");
+      if (val < (int)Short.MIN_VALUE) {
+        throw new IllegalArgumentException("The key " + field + " has a value " + val
+            + " which is too small to fit into a short.");
       }
 
-      if(val > (int)Short.MAX_VALUE) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " has a value "
-                                           + val
-                                           + " which is too large to fit into a short.");
+      if (val > (int)Short.MAX_VALUE) {
+        throw new IllegalArgumentException("The key " + field + " has a value " + val
+            + " which is too large to fit into a short.");
       }
 
       gpo.setField(field, (short)val);
@@ -503,9 +483,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeShort(gpo.getFieldShort(field),
-                     sbytes,
-                     offset);
+      serializeShort(gpo.getFieldShort(field), sbytes, offset);
     }
 
     @Override
@@ -528,7 +506,7 @@ abstract class GPOType
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeShort((Short) object);
+      return GPOUtils.serializeShort((Short)object);
     }
 
     @Override
@@ -553,12 +531,8 @@ abstract class GPOType
 
       try {
         val = jo.getInt(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid int value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid int value.", ex);
       }
 
       gpo.setField(field, val);
@@ -573,9 +547,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeInt(gpo.getFieldInt(field),
-                   sbytes,
-                   offset);
+      serializeInt(gpo.getFieldInt(field), sbytes, offset);
     }
 
     @Override
@@ -598,7 +570,7 @@ abstract class GPOType
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeInt((Integer) object);
+      return GPOUtils.serializeInt((Integer)object);
     }
 
     @Override
@@ -623,12 +595,8 @@ abstract class GPOType
 
       try {
         val = jo.getLong(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid long value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid long value.", ex);
       }
 
       gpo.setField(field, val);
@@ -643,9 +611,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeLong(gpo.getFieldLong(field),
-                    sbytes,
-                    offset);
+      serializeLong(gpo.getFieldLong(field), sbytes, offset);
     }
 
     @Override
@@ -668,7 +634,7 @@ abstract class GPOType
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeLong((Long) object);
+      return GPOUtils.serializeLong((Long)object);
     }
 
     @Override
@@ -693,12 +659,8 @@ abstract class GPOType
 
       try {
         val = (float)jo.getDouble(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid double value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid double value.", ex);
       }
 
       gpo.setFieldGeneric(field, val);
@@ -713,9 +675,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeFloat(gpo.getFieldFloat(field),
-                     sbytes,
-                     offset);
+      serializeFloat(gpo.getFieldFloat(field), sbytes, offset);
     }
 
     @Override
@@ -728,17 +688,13 @@ abstract class GPOType
     @Override
     public void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter, Class<?> clazz)
     {
-      gpoGetters.gettersFloat = createGetters(fields,
-                                              fieldToGetter,
-                                              clazz,
-                                              float.class,
-                                              PojoUtils.GetterFloat.class);
+      gpoGetters.gettersFloat = createGetters(fields, fieldToGetter, clazz, float.class, PojoUtils.GetterFloat.class);
     }
 
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeFloat((Float) object);
+      return GPOUtils.serializeFloat((Float)object);
     }
 
     @Override
@@ -763,12 +719,8 @@ abstract class GPOType
 
       try {
         val = jo.getDouble(index);
-      }
-      catch(JSONException ex) {
-        throw new IllegalArgumentException("The key "
-                                           + field
-                                           + " does not have a valid double value.",
-                                           ex);
+      } catch (JSONException ex) {
+        throw new IllegalArgumentException("The key " + field + " does not have a valid double value.", ex);
       }
 
       gpo.setFieldGeneric(field, val);
@@ -783,9 +735,7 @@ abstract class GPOType
     @Override
     public void serialize(GPOMutable gpo, String field, byte[] sbytes, MutableInt offset)
     {
-      serializeDouble(gpo.getFieldDouble(field),
-                      sbytes,
-                      offset);
+      serializeDouble(gpo.getFieldDouble(field), sbytes, offset);
     }
 
     @Override
@@ -798,17 +748,13 @@ abstract class GPOType
     @Override
     public void buildGPOGetters(GPOGetters gpoGetters, List<String> fields, Map<String, String> fieldToGetter, Class<?> clazz)
     {
-      gpoGetters.gettersDouble = createGetters(fields,
-                                               fieldToGetter,
-                                               clazz,
-                                               double.class,
-                                               PojoUtils.GetterDouble.class);
+      gpoGetters.gettersDouble = createGetters(fields, fieldToGetter, clazz, double.class, PojoUtils.GetterDouble.class);
     }
 
     @Override
     public byte[] serialize(Object object)
     {
-      return GPOUtils.serializeDouble((Double) object);
+      return GPOUtils.serializeDouble((Double)object);
     }
 
     @Override