You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@apex.apache.org by vr...@apache.org on 2016/09/27 16:41:15 UTC

[1/6] apex-malhar git commit: Upgrade checkstyle rules to 1.1.0

Repository: apex-malhar
Updated Branches:
  refs/heads/master 8e39f8236 -> 763d14fca


Upgrade checkstyle rules to 1.1.0


Project: http://git-wip-us.apache.org/repos/asf/apex-malhar/repo
Commit: http://git-wip-us.apache.org/repos/asf/apex-malhar/commit/90b5c9b8
Tree: http://git-wip-us.apache.org/repos/asf/apex-malhar/tree/90b5c9b8
Diff: http://git-wip-us.apache.org/repos/asf/apex-malhar/diff/90b5c9b8

Branch: refs/heads/master
Commit: 90b5c9b883afb6b5656c89a354b82e03231acb48
Parents: 8e39f82
Author: Thomas Weise <th...@datatorrent.com>
Authored: Mon Sep 26 19:49:45 2016 -0700
Committer: Thomas Weise <th...@datatorrent.com>
Committed: Mon Sep 26 19:49:45 2016 -0700

----------------------------------------------------------------------
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/90b5c9b8/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 4047b90..beb22b4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -154,7 +154,7 @@
             <dependency>
               <groupId>org.apache.apex</groupId>
               <artifactId>apex-codestyle-config</artifactId>
-              <version>1.0.0-incubating</version>
+              <version>1.1.0</version>
             </dependency>
           </dependencies>
         </plugin>


[3/6] apex-malhar git commit: Fix trailing whitespace.

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/parser/XmlParser.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/parser/XmlParser.java b/library/src/main/java/com/datatorrent/lib/parser/XmlParser.java
index c8eeacc..bdf6fad 100644
--- a/library/src/main/java/com/datatorrent/lib/parser/XmlParser.java
+++ b/library/src/main/java/com/datatorrent/lib/parser/XmlParser.java
@@ -54,7 +54,7 @@ import com.datatorrent.netlet.util.DTThrowable;
  * the Pojo Class. <br>
  * <b>dateFormats</b>: Comma separated string of date formats e.g
  * dd/mm/yyyy,dd-mmm-yyyy where first one would be considered default
- * 
+ *
  * @displayName XmlParser
  * @category Parsers
  * @tags xml pojo parser

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/projection/ProjectionOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/projection/ProjectionOperator.java b/library/src/main/java/com/datatorrent/lib/projection/ProjectionOperator.java
index 8c22140..6c17529 100644
--- a/library/src/main/java/com/datatorrent/lib/projection/ProjectionOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/projection/ProjectionOperator.java
@@ -59,7 +59,7 @@ import com.datatorrent.lib.util.PojoUtils;
  * - projected port emits POJOs with projected fields from input POJOs
  * - remainder port, if connected, emits POJOs with remainder fields from input POJOs
  * - error port emits input POJOs as is upon error situations
- * 
+ *
  * <b>Examples</b>
  * For {a, b, c} type of input tuples
  *  - when selectFields = "" and dropFields = "", projected port shall emit {a, b, c}

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java b/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
index 9532180..d6589ee 100644
--- a/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/script/ScriptOperator.java
@@ -31,8 +31,8 @@ import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.common.util.BaseOperator;
 
 /**
- * A base implementation of a BaseOperator for language script operator.&nbsp; Subclasses should provide the 
-   implementation of getting the bindings and process method. 
+ * A base implementation of a BaseOperator for language script operator.&nbsp; Subclasses should provide the
+   implementation of getting the bindings and process method.
  * Interface for language script operator.
  * <p>
  * @displayName Script
@@ -55,13 +55,13 @@ public abstract class ScriptOperator extends BaseOperator
     }
 
   };
-  
+
   /**
    * Output outBindings port that emits a map of &lt;String, Object&gt.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Map<String, Object>> outBindings = new DefaultOutputPort<Map<String, Object>>();
-  
+
   /**
    * Output result port that emits an object as the result.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java b/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
index d206071..a7f5147 100644
--- a/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
+++ b/library/src/main/java/com/datatorrent/lib/util/AbstractKeyValueStorageAgent.java
@@ -32,10 +32,10 @@ import com.datatorrent.api.StorageAgent;
 /**
  * Abstract implementation of {@link ApplicationAwareStorageAgent} which can be
 * configured with a KeyValue store, which is an implementation of {@link StorageAgentKeyValueStore}
- * 
+ *
 * NOTE - this should be picked from APEX-CORE once the below feature is released
  * https://issues.apache.org/jira/browse/APEXCORE-283
- * 
+ *
  * @param <S>
  *          Store implementation
  *
@@ -71,7 +71,7 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
 
   /**
    * Return yarn application id of running application
-   * 
+   *
    * @return
    */
   public String getApplicationId()
@@ -81,7 +81,7 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
 
   /**
    * Set yarn application id
-   * 
+   *
    * @param applicationId
    */
   public void setApplicationId(String applicationId)
@@ -92,7 +92,7 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
   /**
    * Generates key from operator id and window id to store unique operator
    * checkpoints
-   * 
+   *
    * @param operatorId
    * @param windowId
    * @return unique key for store
@@ -104,14 +104,14 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
 
   /**
    * Stores the given operator object in configured store
-   * 
+   *
    * @param object
    *          Operator object to store
    * @param operatorId
    *          of operator
    * @param windowId
    *          window id of operator to checkpoint
-   * 
+   *
    */
   @Override
   public void save(Object object, int operatorId, long windowId) throws IOException
@@ -136,7 +136,7 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
   /**
    * Retrieves the operator object for given operator & window from configured
    * store
-   * 
+   *
    * @param operatorId
    *          of operator
    * @param windowId
@@ -167,7 +167,7 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
 
   /**
    * Removes stored operator object for given operatorId & windowId from store
-   * 
+   *
    */
   @Override
   public void delete(int operatorId, long windowId) throws IOException
@@ -189,7 +189,7 @@ public abstract class AbstractKeyValueStorageAgent<S extends StorageAgentKeyValu
   /**
    * Returns list window id for given operator id for which operator objects are
    * stored but not removed
-   * 
+   *
    */
   @Override
   public long[] getWindowIds(int operatorId) throws IOException

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/util/StorageAgentKeyValueStore.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/StorageAgentKeyValueStore.java b/library/src/main/java/com/datatorrent/lib/util/StorageAgentKeyValueStore.java
index 6deed74..5477f4a 100644
--- a/library/src/main/java/com/datatorrent/lib/util/StorageAgentKeyValueStore.java
+++ b/library/src/main/java/com/datatorrent/lib/util/StorageAgentKeyValueStore.java
@@ -24,7 +24,7 @@ import com.datatorrent.lib.db.KeyValueStore;
 
 /**
  * Interface for KeyValue store
- * 
+ *
  *
  * @since 3.4.0
  */
@@ -33,7 +33,7 @@ public interface StorageAgentKeyValueStore extends KeyValueStore
 
    /**
    * Get all the keys associated with key
-   * 
+   *
    * @param key
    * @return the list of all associated keys
    */
@@ -41,10 +41,10 @@ public interface StorageAgentKeyValueStore extends KeyValueStore
 
   /**
    * Set table/region name of store
-   * 
+   *
    * @param tableName
    */
   public void setTableName(String tableName);
-  
-  
+
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/TableInfo.java b/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
index b0d454d..52bf117 100644
--- a/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
+++ b/library/src/main/java/com/datatorrent/lib/util/TableInfo.java
@@ -66,6 +66,6 @@ public class TableInfo<T extends FieldInfo>
   {
     this.fieldsInfo = fieldsInfo;
   }
-  
-  
+
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/TopNSort.java b/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
index ba9cb01..042c75b 100644
--- a/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
+++ b/library/src/main/java/com/datatorrent/lib/util/TopNSort.java
@@ -136,7 +136,7 @@ public class TopNSort<E>
     if (list.isEmpty()) {
       return list;
     }
-  
+
     Collections.reverse(list);
     return list;
     //return ret;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/util/package-info.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/util/package-info.java b/library/src/main/java/com/datatorrent/lib/util/package-info.java
index f65e415..7b1140c 100644
--- a/library/src/main/java/com/datatorrent/lib/util/package-info.java
+++ b/library/src/main/java/com/datatorrent/lib/util/package-info.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 /**
- * Library of shared operators and utilities. 
+ * Library of shared operators and utilities.
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 package com.datatorrent.lib.util;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dedup/BoundedDedupOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dedup/BoundedDedupOperator.java b/library/src/main/java/org/apache/apex/malhar/lib/dedup/BoundedDedupOperator.java
index 5509ba0..7763103 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dedup/BoundedDedupOperator.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dedup/BoundedDedupOperator.java
@@ -42,17 +42,17 @@ import com.datatorrent.netlet.util.Slice;
  * An implementation for {@link AbstractDeduper} which handles the case of bounded data set.
  * This implementation assumes that the incoming tuple does not have a time field, and the de-duplication
  * is to be strictly based on the key of the tuple.
- * 
+ *
  * This implementation uses {@link ManagedTimeStateImpl} for storing the tuple keys on the persistent storage.
- * 
+ *
  * Following properties need to be configured for the functioning of the operator:
  * 1. {@link #keyExpression}: The java expression to extract the key fields in the incoming tuple (POJO)
- * 2. {@link #numBuckets} (optional): The number of buckets that need to be used for storing the keys of the 
+ * 2. {@link #numBuckets} (optional): The number of buckets that need to be used for storing the keys of the
  * incoming tuples.
- * NOTE: Users can decide upon the proper value for this parameter by guessing the number of distinct keys 
+ * NOTE: Users can decide upon the proper value for this parameter by guessing the number of distinct keys
 * in the application. An appropriate value would be sqrt(num distinct keys). In case, the number of distinct keys is a
  * huge number, leave it blank so that the default value of 46340 will be used. The rationale for using this number is
- * that sqrt(max integer) = 46340. This implies that the number of buckets used will roughly be equal to the size of 
+ * that sqrt(max integer) = 46340. This implies that the number of buckets used will roughly be equal to the size of
  * each bucket, thus spreading the load equally among each bucket.
  *
  *
@@ -194,10 +194,10 @@ public class BoundedDedupOperator extends AbstractDeduper<Object>
 
   /**
    * Sets the number of buckets
-   * NOTE: Users can decide upon the proper value for this parameter by guessing the number of distinct keys 
+   * NOTE: Users can decide upon the proper value for this parameter by guessing the number of distinct keys
   * in the application. An appropriate value would be sqrt(num distinct keys). In case, the number of distinct keys is a
    * huge number, leave it blank so that the default value of 46340 will be used. The rationale for using this number is
-   * that sqrt(max integer) = 46340. This implies that the number of buckets used will roughly be equal to the size of 
+   * that sqrt(max integer) = 46340. This implies that the number of buckets used will roughly be equal to the size of
    * each bucket, thus spreading the load equally among each bucket.
    * @param numBuckets the number of buckets
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregator.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregator.java
index f7ab25d..52ef811 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregator.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregator.java
@@ -44,9 +44,9 @@ public abstract class AbstractCompositeAggregator implements CompositeAggregator
   //protected int embedAggregatorID;
   protected Set<Integer> embedAggregatorDdIds = Sets.newHashSet();
   protected Set<String> fields = Sets.newHashSet();
-  
+
   protected DimensionsConversionContext dimensionsConversionContext;
-  
+
   public DimensionsConversionContext getDimensionsConversionContext()
   {
     return dimensionsConversionContext;
@@ -63,7 +63,7 @@ public abstract class AbstractCompositeAggregator implements CompositeAggregator
     this.setDimensionsConversionContext(dimensionsConversionContext);
     return this;
   }
-  
+
   public String getEmbedAggregatorName()
   {
     return embedAggregatorName;
@@ -96,7 +96,7 @@ public abstract class AbstractCompositeAggregator implements CompositeAggregator
   {
     this.dimensionDescriptorID = dimensionDescriptorID;
   }
-  
+
   @Override
   public int getAggregatorID()
   {
@@ -118,7 +118,7 @@ public abstract class AbstractCompositeAggregator implements CompositeAggregator
   {
     this.aggregateDescriptor = aggregateDescriptor;
   }
-  
+
   @Override
   public Set<String> getFields()
   {
@@ -155,7 +155,7 @@ public abstract class AbstractCompositeAggregator implements CompositeAggregator
   {
     embedAggregatorDdIds.addAll(ddids);
   }
-  
+
   /**
    * bright: TODO: check
    */
@@ -164,5 +164,5 @@ public abstract class AbstractCompositeAggregator implements CompositeAggregator
   {
     return null;
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregatorFactory.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregatorFactory.java
index 8156064..5cf4582 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregatorFactory.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractCompositeAggregatorFactory.java
@@ -28,7 +28,7 @@ public abstract class AbstractCompositeAggregatorFactory implements CompositeAgg
   protected static final String NAME_TEMPLATE = "%s-%s-%s";
   protected static final String PROPERTY_SEPERATOR = "_";
   protected static final String PROPERTY_VALUE_SEPERATOR = "|";
-  
+
   @Override
   public String getCompositeAggregatorName(String aggregatorType, String embededAggregatorName,
       Map<String, Object> properties)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractIncrementalAggregator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractIncrementalAggregator.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractIncrementalAggregator.java
index bf2e342..bf2054e 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractIncrementalAggregator.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractIncrementalAggregator.java
@@ -48,7 +48,7 @@ import com.datatorrent.lib.appdata.schemas.CustomTimeBucket;
  * the sum aggregator. And the {DimensionsEventregate} event produced by the sum aggregator will contain two fields,
  * one for cost and one for revenue.
  * </p>
- * 
+ *
  *
  * @since 3.4.0
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractTopBottomAggregator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractTopBottomAggregator.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractTopBottomAggregator.java
index 41d1372..e38ea0e 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractTopBottomAggregator.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AbstractTopBottomAggregator.java
@@ -43,7 +43,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
   public static final String PROP_COUNT = "count";
   protected int count;
   protected SortedSet<String> subCombinations = Sets.newTreeSet();
-  
+
   public AbstractTopBottomAggregator withEmbedAggregatorName(String embedAggregatorName)
   {
     this.setEmbedAggregatorName(embedAggregatorName);
@@ -55,7 +55,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
     this.setSubCombinations(subCombinations);
     return this;
   }
-  
+
   public AbstractTopBottomAggregator withCount(int count)
   {
     this.setCount(count);
@@ -71,7 +71,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
   {
     this.count = count;
   }
-  
+
   public void setSubCombinations(Set<String> subCombinations)
   {
     this.subCombinations.clear();
@@ -91,11 +91,12 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
   /**
    * TOP/BOTTOM return a list of value
    */
+  @Override
   public Type getOutputType()
   {
     return Type.OBJECT;
   }
-  
+
   @Override
   public int hashCode()
   {
@@ -115,7 +116,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
     if (getClass() != obj.getClass()) {
       return false;
     }
-    
+
     AbstractTopBottomAggregator other = (AbstractTopBottomAggregator)obj;
     if (embedAggregatorName != other.embedAggregatorName
         && (embedAggregatorName == null || !embedAggregatorName.equals(other.embedAggregatorName))) {
@@ -131,8 +132,8 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
 
     return true;
   }
-  
-  
+
+
   /**
    * The result keep a list of object for each aggregate value
    * The value of resultAggregate should keep a list of inputEventKey(the value can be get from cache or load) or a map
@@ -149,7 +150,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
     //there are problem for composite's value field descriptor, just ignore now.
     GPOMutable resultGpo = resultAggregate.getAggregates();
     final List<String> compositeFieldList = resultAggregate.getEventKey().getKey().getFieldDescriptor().getFieldList();
-    
+
     //Map<EventKey, Aggregate> existedSubEventKeyToAggregate = Maps.newHashMap();
     for (String valueField : resultGpo.getFieldDescriptor().getFieldList()) {
       //the resultGpo keep a list of sub aggregates
@@ -168,7 +169,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
 
   /**
    * get store map key from the eventKey
-   * 
+   *
    * @param eventKey
    * @return
    */
@@ -183,16 +184,16 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
       key.append(subEventKey.getKey().getField(field)).append(KEY_VALUE_SEPERATOR);
     }
     key.deleteCharAt(key.length() - 1);
-    
+
     return key.toString();
   }
-  
+
 
   /**
-   * update existed sub aggregate. 
+   * update existed sub aggregate.
    * The sub aggregates which kept in composite aggregate as candidate could be changed. synchronize the value with
    * input aggregates.
-   * 
+   *
    * @param resultAggregate
    * @param valueField
    * @param inputSubEventKeys
@@ -218,7 +219,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
       }
     }
   }
-  
+
   /**
    * need a map of value field from the inputGpo to resultGpo, use the index of Fields as the index
    * @param resultGpo
@@ -241,7 +242,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
           fieldToType.get(aggregateField));
     }
   }
-  
+
   /**
   * separate it in case a subclass overrides it.
    * @param fieldName
@@ -252,7 +253,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
   {
     return Maps.newHashMap();
   }
-  
+
   /**
    * compare the result(resultMap) with input(inputFieldName, inputFieldValue)
    * @param resultMap
@@ -275,7 +276,7 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
     }
 
   }
-  
+
   /**
   * should the result element be replaced by the input element.
    * the inputElement and resultElement should be same type
@@ -299,11 +300,11 @@ public abstract class AbstractTopBottomAggregator extends AbstractCompositeAggre
       int compareResult = ((Comparable<Object>)resultElement).compareTo(inputElement);
       return shouldReplaceResultElement(compareResult);
     }
-    
+
     //handle other cases
     throw new RuntimeException("Should NOT come here.");
-    
+
   }
-  
+
   protected abstract boolean shouldReplaceResultElement(int resultCompareToInput);
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorRegistry.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorRegistry.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorRegistry.java
index 6482c3b..85f1822 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorRegistry.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorRegistry.java
@@ -108,9 +108,9 @@ public class AggregatorRegistry implements Serializable
    * {@link IncrementalAggregator} to the corresponding {@link IncrementalAggregator}.
    */
   private transient Map<Integer, IncrementalAggregator> incrementalAggregatorIDToAggregator;
-  
+
   protected transient Map<Integer, AbstractTopBottomAggregator> topBottomAggregatorIDToAggregator;
-  
+
   /**
    * This is a map from the name assigned to an {@link IncrementalAggregator} to the {@link IncrementalAggregator}.
    */
@@ -119,19 +119,19 @@ public class AggregatorRegistry implements Serializable
    * This is a map from the name assigned to an {@link OTFAggregator} to the {@link OTFAggregator}.
    */
   private Map<String, OTFAggregator> nameToOTFAggregator;
-  
+
   /**
    * the map from TOPN and BOTTOM aggregator to name
    */
   private Map<String, AbstractTopBottomAggregator> nameToTopBottomAggregator = Maps.newHashMap();
-  
+
   /**
    * This is a map from the name of an {@link IncrementalAggregator} to the ID of that {@link IncrementalAggregator}.
    */
   private Map<String, Integer> incrementalAggregatorNameToID;
-  
+
   protected Map<String, Integer> topBottomAggregatorNameToID = Maps.newHashMap();
-  
+
   protected static Set<String> topBottomAggregatorNames;
 
 
@@ -269,12 +269,12 @@ public class AggregatorRegistry implements Serializable
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
     }
-    
+
     for (Map.Entry<String, Integer> entry : topBottomAggregatorNameToID.entrySet()) {
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
     }
-    
+
     for (Map.Entry<String, AbstractTopBottomAggregator> entry : nameToTopBottomAggregator.entrySet()) {
       Preconditions.checkNotNull(entry.getKey());
       Preconditions.checkNotNull(entry.getValue());
@@ -337,7 +337,7 @@ public class AggregatorRegistry implements Serializable
           nameToTopBottomAggregator.get(aggregatorName));
     }
   }
-  
+
   /**
    * This is a helper method which sets and validated the given mapping from an {@link IncrementalAggregator}'s name
    * to an {@link IncrementalAggregator}.
@@ -375,7 +375,7 @@ public class AggregatorRegistry implements Serializable
         nameToOTFAggregator.containsKey(aggregatorName)) {
       return true;
     }
-    
+
     //the composite probably send whole aggregator name
     String aggregatorType = aggregatorName.split("-")[0];
     return (AggregatorTopBottomType.valueOf(aggregatorType) != null);
@@ -399,12 +399,12 @@ public class AggregatorRegistry implements Serializable
   {
     return nameToOTFAggregator.containsKey(aggregatorName);
   }
-  
+
   public boolean isTopBottomAggregatorType(String aggregatorType)
   {
     return (AggregatorTopBottomType.valueOf(aggregatorType) != null);
   }
-  
+
   /**
    * Gets the mapping from an {@link IncrementalAggregator}'s class to the {@link IncrementalAggregator}.
    *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorUtils.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorUtils.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorUtils.java
index d9ad83d..006cadf 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorUtils.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/AggregatorUtils.java
@@ -164,5 +164,5 @@ public final class AggregatorUtils
 
     return new FieldsDescriptor(fieldToType, fieldToSerde);
   }
-        
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregator.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregator.java
index 916467d..e64e957 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregator.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregator.java
@@ -37,13 +37,13 @@ public interface CompositeAggregator
   public int getDimensionDescriptorID();
 
   public int getAggregatorID();
-  
+
   public Set<Integer> getEmbedAggregatorDdIds();
-  
+
   public Set<String> getFields();
 
   public FieldsDescriptor getAggregateDescriptor();
-  
+
   public FieldsDescriptor getMetaDataDescriptor();
 
   /**
@@ -52,9 +52,9 @@ public interface CompositeAggregator
    * @return The output type of the {@link CompositeAggregator}.
    */
   public Type getOutputType();
-  
+
   /**
-   * 
+   *
    * @param resultAggregate the aggregate to put the result
    * @param inputEventKeys The input(incremental) event keys, used to locate the input aggregates
    * @param inputAggregatesRepo: the map of the EventKey to Aggregate keep the super set of aggregate required

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregatorFactory.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregatorFactory.java
index da1d225..18682d0 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregatorFactory.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/CompositeAggregatorFactory.java
@@ -35,7 +35,7 @@ public interface CompositeAggregatorFactory
    * @return
    */
   //public boolean isValidCompositeAggregatorName(String aggregatorName);
-  
+
   /**
    * get composite aggregator name based on composite aggregator information
    * @param aggregatorType
@@ -45,7 +45,7 @@ public interface CompositeAggregatorFactory
    */
   public String getCompositeAggregatorName(String aggregatorType, String embedAggregatorName,
       Map<String, Object> properties);
-  
+
   /**
    * create composite aggregator name based on composite aggregator information
    * @param aggregatorType

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/DefaultCompositeAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/DefaultCompositeAggregatorFactory.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/DefaultCompositeAggregatorFactory.java
index 125c3f1..a3a148a 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/DefaultCompositeAggregatorFactory.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/DefaultCompositeAggregatorFactory.java
@@ -25,7 +25,7 @@ import com.google.common.collect.Maps;
 /**
  * The DefaultCompositeAggregatorFactory find the specific factory according to the aggregator type
  * and delegate to the specific factory.
- * 
+ *
  *
  * @since 3.4.0
  */
@@ -34,9 +34,9 @@ public class DefaultCompositeAggregatorFactory implements CompositeAggregatorFac
   public static final DefaultCompositeAggregatorFactory defaultInst = new DefaultCompositeAggregatorFactory()
       .addFactory(AggregatorTopBottomType.TOPN.name(), TopBottomAggregatorFactory.defaultInstance)
       .addFactory(AggregatorTopBottomType.BOTTOMN.name(), TopBottomAggregatorFactory.defaultInstance);
-  
+
   protected Map<String, CompositeAggregatorFactory> factoryRepository = Maps.newHashMap();
-  
+
   @Override
   public String getCompositeAggregatorName(String aggregatorType, String embedAggregatorName,
       Map<String, Object> properties)
@@ -57,7 +57,7 @@ public class DefaultCompositeAggregatorFactory implements CompositeAggregatorFac
   {
     return factoryRepository.get(aggregatorType);
   }
-  
+
   public DefaultCompositeAggregatorFactory addFactory(String aggregatorType, CompositeAggregatorFactory factory)
   {
     factoryRepository.put(aggregatorType, factory);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/TopBottomAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/TopBottomAggregatorFactory.java b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/TopBottomAggregatorFactory.java
index 89f6bb7..8843b4e 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/TopBottomAggregatorFactory.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/dimensions/aggregator/TopBottomAggregatorFactory.java
@@ -33,7 +33,7 @@ public class TopBottomAggregatorFactory extends AbstractCompositeAggregatorFacto
   public static final String PROPERTY_NAME_SUB_COMBINATIONS = "subCombinations";
 
   public static final TopBottomAggregatorFactory defaultInstance = new TopBottomAggregatorFactory();
-  
+
   @Override
   public <T> AbstractTopBottomAggregator createCompositeAggregator(String aggregatorType, String embedAggregatorName,
       Map<String, Object> properties)
@@ -41,7 +41,7 @@ public class TopBottomAggregatorFactory extends AbstractCompositeAggregatorFacto
     return createTopBottomAggregator(aggregatorType, embedAggregatorName, getCount(properties),
         getSubCombinations(properties));
   }
-  
+
   public <T> AbstractTopBottomAggregator createTopBottomAggregator(String aggregatorType, String embedAggregatorName,
       int count, String[] subCombinations)
   {
@@ -58,7 +58,7 @@ public class TopBottomAggregatorFactory extends AbstractCompositeAggregatorFacto
     aggregator.setEmbedAggregatorName(embedAggregatorName);
     aggregator.setCount(count);
     aggregator.setSubCombinations(subCombinations);
-    
+
     return aggregator;
   }
 
@@ -66,12 +66,12 @@ public class TopBottomAggregatorFactory extends AbstractCompositeAggregatorFacto
   {
     return Integer.valueOf((String)properties.get(PROPERTY_NAME_COUNT));
   }
-  
+
   protected String[] getSubCombinations(Map<String, Object> properties)
   {
     return (String[])properties.get(PROPERTY_NAME_SUB_COMBINATIONS);
   }
-  
+
   /**
    * The properties of TOP or BOTTOM are count and subCombinations.
    * count only have one value and subCombinations is a set of string, we can order combinations to simplify the name
@@ -82,13 +82,13 @@ public class TopBottomAggregatorFactory extends AbstractCompositeAggregatorFacto
     StringBuilder sb = new StringBuilder();
     String count = (String)properties.get(PROPERTY_NAME_COUNT);
     sb.append(count).append(PROPERTY_SEPERATOR);
-    
+
     String[] subCombinations =  (String[])properties.get(PROPERTY_NAME_SUB_COMBINATIONS);
     Set<String> sortedSubCombinations = Sets.newTreeSet();
     for (String subCombination : subCombinations) {
       sortedSubCombinations.add(subCombination);
     }
-    
+
     for (String subCombination : sortedSubCombinations) {
       sb.append(subCombination).append(PROPERTY_SEPERATOR);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReader.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReader.java b/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReader.java
index 31f35aa..268c51b 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReader.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReader.java
@@ -32,7 +32,7 @@ import com.datatorrent.lib.io.block.ReaderContext;
  * This operator can be used for reading records/tuples from Filesystem in
  * parallel (without ordering guarantees between tuples). Records can be
  * delimited (e.g. newline) or fixed width records. Output tuples are byte[].
- * 
+ *
  * Typically, this operator will be connected to output of FileSplitterInput to
  * read records in parallel.
  *
@@ -106,7 +106,7 @@ public class FSRecordReader extends FSSliceReader
 
   /**
    * Criteria for record split
-   * 
+   *
    * @param mode
    *          Mode
    */
@@ -117,7 +117,7 @@ public class FSRecordReader extends FSSliceReader
 
   /**
    * Criteria for record split
-   * 
+   *
    * @return mode
    */
   public RECORD_READER_MODE getMode()
@@ -127,7 +127,7 @@ public class FSRecordReader extends FSSliceReader
 
   /**
    * Length for fixed width record
-   * 
+   *
    * @param recordLength
    */
   public void setRecordLength(int recordLength)
@@ -140,7 +140,7 @@ public class FSRecordReader extends FSSliceReader
 
   /**
    * Length for fixed width record
-   * 
+   *
    * @return record length
    */
   public int getRecordLength()

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReaderModule.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReaderModule.java b/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReaderModule.java
index 0a9b321..d508320 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReaderModule.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/fs/FSRecordReaderModule.java
@@ -42,7 +42,7 @@ import com.datatorrent.lib.io.fs.FileSplitterInput;
  * (Ordering is not guaranteed when records are read in parallel)
  *
  * Input directory is scanned at specified interval to poll for new data.
- * 
+ *
  * The module reads data in parallel, following parameters can be configured
  * <br/>
  * 1. files: list of file(s)/directories to read<br/>
@@ -91,7 +91,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Creates an instance of FileSplitter
-   * 
+   *
    * @return
    */
   public FileSplitterInput createFileSplitter()
@@ -101,7 +101,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Creates an instance of Record Reader
-   * 
+   *
    * @return FSRecordReader instance
    */
   public FSRecordReader createRecordReader()
@@ -233,7 +233,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Gets readers count
-   * 
+   *
    * @return readersCount
    */
   public int getReadersCount()
@@ -243,7 +243,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Static count of readers to read input file
-   * 
+   *
    * @param readersCount
    */
   public void setReadersCount(int readersCount)
@@ -276,7 +276,7 @@ public class FSRecordReaderModule implements Module
    * Sets number of blocks to be emitted per window.<br/>
    * A lot of blocks emitted per window can overwhelm the downstream operators.
    * Set this value considering blockSize and readersCount.
-   * 
+   *
    * @param threshold
    */
   public void setBlocksThreshold(int threshold)
@@ -288,7 +288,7 @@ public class FSRecordReaderModule implements Module
    * Gets number of blocks to be emitted per window.<br/>
    * A lot of blocks emitted per window can overwhelm the downstream operators.
    * Set this value considering blockSize and readersCount.
-   * 
+   *
    * @return
    */
   public int getBlocksThreshold()
@@ -298,7 +298,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Criteria for record split
-   * 
+   *
    * @return mode
    */
   public RECORD_READER_MODE getMode()
@@ -308,7 +308,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Criteria for record split
-   * 
+   *
    * @param mode
    *          Mode
    */
@@ -319,7 +319,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Length for fixed width record
-   * 
+   *
    * @return record length
    */
   public int getRecordLength()
@@ -329,7 +329,7 @@ public class FSRecordReaderModule implements Module
 
   /**
    * Length for fixed width record
-   * 
+   *
    * @param recordLength
    */
   public void setRecordLength(int recordLength)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/state/managed/IncrementalCheckpointManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/state/managed/IncrementalCheckpointManager.java b/library/src/main/java/org/apache/apex/malhar/lib/state/managed/IncrementalCheckpointManager.java
index 237f4b9..3b01ed2 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/state/managed/IncrementalCheckpointManager.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/state/managed/IncrementalCheckpointManager.java
@@ -193,7 +193,7 @@ public class IncrementalCheckpointManager extends FSWindowDataManager
     Map<Long, Object> artifactPerWindow = new HashMap<>();
     FileSystemWAL.FileSystemWALReader reader = getWal().getReader();
     reader.seek(getWal().getWalStartPointer());
-    
+
     Slice windowSlice = readNext(reader);
     while (reader.getCurrentPointer().compareTo(getWal().getWalEndPointerAfterRecovery()) < 0 && windowSlice != null) {
       long window = Longs.fromByteArray(windowSlice.toByteArray());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/state/spillable/TimeBasedPriorityQueue.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/state/spillable/TimeBasedPriorityQueue.java b/library/src/main/java/org/apache/apex/malhar/lib/state/spillable/TimeBasedPriorityQueue.java
index b12f119..52dfac7 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/state/spillable/TimeBasedPriorityQueue.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/state/spillable/TimeBasedPriorityQueue.java
@@ -117,13 +117,13 @@ public class TimeBasedPriorityQueue<T>
       } else if (this.timestamp > timeWrapper.getTimestamp()) {
         return 1;
       }
-      
+
       /**
        * NOTE: the following use the equals() to implement the compareTo() for key.
-       * it should be OK as the compareTo() only used by TimeBasedPriorityQueue.sortedTimestamp, 
+       * it should be OK as the compareTo() only used by TimeBasedPriorityQueue.sortedTimestamp,
        * which only care about the order of time ( the order for key doesn't matter ).
        * But would cause problem if add other function which depended on the order of the key.
-       * 
+       *
        * Add compare by hashCode when not equals in order to compatible with the interface for most cases.
        * Anyway, the order of key is not guaranteed. And we should not return 0 if not equals
        */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowDataManager.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowDataManager.java b/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowDataManager.java
index 2b85580..6e8774e 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowDataManager.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowDataManager.java
@@ -134,7 +134,7 @@ public class FSWindowDataManager implements WindowDataManager
    * Used by {@link IncrementalCheckpointManager}
    */
   private boolean relyOnCheckpoints;
-  
+
   private transient long largestCompletedWindow = Stateless.WINDOW_ID;
 
   private final FSWindowReplayWAL wal = new FSWindowReplayWAL();
@@ -303,7 +303,7 @@ public class FSWindowDataManager implements WindowDataManager
 
         long lastWindow = Stateless.WINDOW_ID;
         Slice slice = readNext(reader);
-  
+
         while (slice != null) {
           boolean skipComplete = skipNext(reader); //skip the artifact because we need just the largest window id.
           if (!skipComplete) {
@@ -311,7 +311,7 @@ public class FSWindowDataManager implements WindowDataManager
             break;
           }
           long offset = reader.getCurrentPointer().getOffset();
-    
+
           long window = Longs.fromByteArray(slice.toByteArray());
           if (ceilingWindow != null && window > ceilingWindow) {
             break;
@@ -393,7 +393,7 @@ public class FSWindowDataManager implements WindowDataManager
       }
     }
   }
-  
+
   /**
    * Save writes 2 entries to the wal: <br/>
    * <ol>
@@ -481,7 +481,7 @@ public class FSWindowDataManager implements WindowDataManager
 
         wal.windowWalParts.put(currentWindow, reader.getCurrentPointer().getPartNum());
         wal.retrievedWindow = readNext(reader); //null or next window
-        
+
         return fromSlice(data);
       } else if (windowId < currentWindow) {
         //no artifact saved corresponding to that window and artifact is not read.
@@ -500,7 +500,7 @@ public class FSWindowDataManager implements WindowDataManager
     }
     return null;
   }
-  
+
   /**
    * Deletes artifacts for all windows less than equal to committed window id.<p/>
    *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowReplayWAL.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowReplayWAL.java b/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowReplayWAL.java
index 74ca929..d848804 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowReplayWAL.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/wal/FSWindowReplayWAL.java
@@ -73,12 +73,12 @@ public class FSWindowReplayWAL extends FileSystemWAL
       throw new RuntimeException("while setup");
     }
   }
-  
+
   public FileSystemWALPointer getWalEndPointerAfterRecovery()
   {
     return walEndPointerAfterRecovery;
   }
-  
+
   /**
    * Finalizes files just after rotation. Doesn't wait for the window to be committed.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/wal/FileSystemWAL.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/wal/FileSystemWAL.java b/library/src/main/java/org/apache/apex/malhar/lib/wal/FileSystemWAL.java
index b7d5ba1..49f61a4 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/wal/FileSystemWAL.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/wal/FileSystemWAL.java
@@ -163,7 +163,7 @@ public class FileSystemWAL implements WAL<FileSystemWAL.FileSystemWALReader, Fil
   {
     return filePath + "_" + partNumber;
   }
-  
+
   /**
    * @return the wal start pointer
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Average.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Average.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Average.java
index f0a66a4..af623f3 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Average.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Average.java
@@ -33,7 +33,7 @@ public class Average implements Accumulation<Double, MutablePair<Double, Long>,
   {
     return new MutablePair<>(0.0, 0L);
   }
-  
+
   @Override
   public MutablePair<Double, Long> accumulate(MutablePair<Double, Long> accu, Double input)
   {
@@ -41,7 +41,7 @@ public class Average implements Accumulation<Double, MutablePair<Double, Long>,
     accu.setRight(accu.getRight() + 1);
     return accu;
   }
-  
+
   @Override
   public MutablePair<Double, Long> merge(MutablePair<Double, Long> accu1, MutablePair<Double, Long> accu2)
   {
@@ -50,13 +50,13 @@ public class Average implements Accumulation<Double, MutablePair<Double, Long>,
     accu1.setRight(accu1.getRight() + accu2.getRight());
     return accu1;
   }
-  
+
   @Override
   public Double getOutput(MutablePair<Double, Long> accumulatedValue)
   {
     return accumulatedValue.getLeft();
   }
-  
+
   @Override
   public Double getRetraction(Double value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Group.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Group.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Group.java
index f2affd1..d217ce9 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Group.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Group.java
@@ -35,27 +35,27 @@ public class Group<T> implements Accumulation<T, List<T>, List<T>>
   {
     return new ArrayList<>();
   }
-  
+
   @Override
   public List<T> accumulate(List<T> accumulatedValue, T input)
   {
     accumulatedValue.add(input);
     return accumulatedValue;
   }
-  
+
   @Override
   public List<T> merge(List<T> accumulatedValue1, List<T> accumulatedValue2)
   {
     accumulatedValue1.addAll(accumulatedValue2);
     return accumulatedValue1;
   }
-  
+
   @Override
   public List<T> getOutput(List<T> accumulatedValue)
   {
     return accumulatedValue;
   }
-  
+
   @Override
   public List<T> getRetraction(List<T> value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Max.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Max.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Max.java
index 64ff0c4..92aec18 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Max.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Max.java
@@ -28,20 +28,20 @@ import org.apache.apex.malhar.lib.window.Accumulation;
  */
 public class Max<T> implements Accumulation<T, T, T>
 {
-  
+
   Comparator<T> comparator;
-  
+
   public void setComparator(Comparator<T> comparator)
   {
     this.comparator = comparator;
   }
-  
+
   @Override
   public T defaultAccumulatedValue()
   {
     return null;
   }
-  
+
   @Override
   public T accumulate(T accumulatedValue, T input)
   {
@@ -55,19 +55,19 @@ public class Max<T> implements Accumulation<T, T, T>
       throw new RuntimeException("Tuple cannot be compared");
     }
   }
-  
+
   @Override
   public T merge(T accumulatedValue1, T accumulatedValue2)
   {
     return accumulate(accumulatedValue1, accumulatedValue2);
   }
-  
+
   @Override
   public T getOutput(T accumulatedValue)
   {
     return accumulatedValue;
   }
-  
+
   @Override
   public T getRetraction(T value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Min.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Min.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Min.java
index 48017a7..2b6247a 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Min.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/Min.java
@@ -28,20 +28,20 @@ import org.apache.apex.malhar.lib.window.Accumulation;
  */
 public class Min<T> implements Accumulation<T, T, T>
 {
-  
+
   Comparator<T> comparator;
-  
+
   public void setComparator(Comparator<T> comparator)
   {
     this.comparator = comparator;
   }
-  
+
   @Override
   public T defaultAccumulatedValue()
   {
     return null;
   }
-  
+
   @Override
   public T accumulate(T accumulatedValue, T input)
   {
@@ -55,19 +55,19 @@ public class Min<T> implements Accumulation<T, T, T>
       throw new RuntimeException("Tuple cannot be compared");
     }
   }
-  
+
   @Override
   public T merge(T accumulatedValue1, T accumulatedValue2)
   {
     return accumulate(accumulatedValue1, accumulatedValue2);
   }
-  
+
   @Override
   public T getOutput(T accumulatedValue)
   {
     return accumulatedValue;
   }
-  
+
   @Override
   public T getRetraction(T value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicates.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicates.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicates.java
index 2548f72..53f3534 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicates.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicates.java
@@ -38,14 +38,14 @@ public class RemoveDuplicates<T> implements Accumulation<T, Set<T>, List<T>>
   {
     return new HashSet<>();
   }
-  
+
   @Override
   public Set<T> accumulate(Set<T> accumulatedValue, T input)
   {
     accumulatedValue.add(input);
     return accumulatedValue;
   }
-  
+
   @Override
   public Set<T> merge(Set<T> accumulatedValue1, Set<T> accumulatedValue2)
   {
@@ -54,7 +54,7 @@ public class RemoveDuplicates<T> implements Accumulation<T, Set<T>, List<T>>
     }
     return accumulatedValue1;
   }
-  
+
   @Override
   public List<T> getOutput(Set<T> accumulatedValue)
   {
@@ -64,7 +64,7 @@ public class RemoveDuplicates<T> implements Accumulation<T, Set<T>, List<T>>
       return new ArrayList<>(accumulatedValue);
     }
   }
-  
+
   @Override
   public List<T> getRetraction(List<T> value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumDouble.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumDouble.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumDouble.java
index 11ab2ab..475d653 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumDouble.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumDouble.java
@@ -33,27 +33,27 @@ public class SumDouble implements Accumulation<Double, MutableDouble, Double>
   {
     return new MutableDouble(0.0);
   }
-  
+
   @Override
   public MutableDouble accumulate(MutableDouble accumulatedValue, Double input)
   {
     accumulatedValue.add(input);
     return accumulatedValue;
   }
-  
+
   @Override
   public MutableDouble merge(MutableDouble accumulatedValue1, MutableDouble accumulatedValue2)
   {
     accumulatedValue1.add(accumulatedValue2);
     return accumulatedValue1;
   }
-  
+
   @Override
   public Double getOutput(MutableDouble accumulatedValue)
   {
     return accumulatedValue.doubleValue();
   }
-  
+
   @Override
   public Double getRetraction(Double value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumFloat.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumFloat.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumFloat.java
index d11bec3..dff3be6 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumFloat.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumFloat.java
@@ -33,27 +33,27 @@ public class SumFloat implements Accumulation<Float, MutableFloat, Float>
   {
     return new MutableFloat(0.);
   }
-  
+
   @Override
   public MutableFloat accumulate(MutableFloat accumulatedValue, Float input)
   {
     accumulatedValue.add(input);
     return accumulatedValue;
   }
-  
+
   @Override
   public MutableFloat merge(MutableFloat accumulatedValue1, MutableFloat accumulatedValue2)
   {
     accumulatedValue1.add(accumulatedValue2);
     return accumulatedValue1;
   }
-  
+
   @Override
   public Float getOutput(MutableFloat accumulatedValue)
   {
     return accumulatedValue.floatValue();
   }
-  
+
   @Override
   public Float getRetraction(Float value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumInt.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumInt.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumInt.java
index cf0c50e..dca67a4 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumInt.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumInt.java
@@ -33,27 +33,27 @@ public class SumInt implements Accumulation<Integer, MutableInt, Integer>
   {
     return new MutableInt(0);
   }
-  
+
   @Override
   public MutableInt accumulate(MutableInt accumulatedValue, Integer input)
   {
     accumulatedValue.add(input);
     return accumulatedValue;
   }
-  
+
   @Override
   public MutableInt merge(MutableInt accumulatedValue1, MutableInt accumulatedValue2)
   {
     accumulatedValue1.add(accumulatedValue2);
     return accumulatedValue1;
   }
-  
+
   @Override
   public Integer getOutput(MutableInt accumulatedValue)
   {
     return accumulatedValue.intValue();
   }
-  
+
   @Override
   public Integer getRetraction(Integer value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumLong.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumLong.java b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumLong.java
index 55908f5..027e4f8 100644
--- a/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumLong.java
+++ b/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation/SumLong.java
@@ -33,27 +33,27 @@ public class SumLong implements Accumulation<Long, MutableLong, Long>
   {
     return new MutableLong(0L);
   }
-  
+
   @Override
   public MutableLong accumulate(MutableLong accumulatedValue, Long input)
   {
     accumulatedValue.add(input);
     return accumulatedValue;
   }
-  
+
   @Override
   public MutableLong merge(MutableLong accumulatedValue1, MutableLong accumulatedValue2)
   {
     accumulatedValue1.add(accumulatedValue2);
     return accumulatedValue1;
   }
-  
+
   @Override
   public Long getOutput(MutableLong accumulatedValue)
   {
     return accumulatedValue.longValue();
   }
-  
+
   @Override
   public Long getRetraction(Long value)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/hadoop/io/file/tfile/DTBCFile.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/hadoop/io/file/tfile/DTBCFile.java b/library/src/main/java/org/apache/hadoop/io/file/tfile/DTBCFile.java
index 779b0f0..6db0557 100644
--- a/library/src/main/java/org/apache/hadoop/io/file/tfile/DTBCFile.java
+++ b/library/src/main/java/org/apache/hadoop/io/file/tfile/DTBCFile.java
@@ -47,18 +47,18 @@ import org.apache.hadoop.io.file.tfile.Compression.Algorithm;
 
 
 /**
- * 
+ *
  * <ul>
- * <li>The file format of DTFile is same as {@link TFile} with different reader implementation. 
+ * <li>The file format of DTFile is same as {@link TFile} with different reader implementation.
  * It reads data block by block and cache the binary block data into memory to speed up the random read.
- * 
- * <li>The public api of {@link Reader} is as same as it is in {@link TFile} {@link org.apache.hadoop.io.file.tfile.TFile.Reader} implementation. 
+ *
+ * <li>The public api of {@link Reader} is as same as it is in {@link TFile} {@link org.apache.hadoop.io.file.tfile.TFile.Reader} implementation.
  * Besides, it provides getBlockBuffer(), getKeyOffset(), getKeyLength(), getValueOffset(), getValueLength() method
  *  to expose raw block, key, value data to user to avoid unnecessary internal/external data copy
- *  
+ *
  * <li>In the performance test, It shows no difference in sequential reads and 20x faster in random reads(If most of them hit memory)
  * </ul>
- * 
+ *
  * Block Compressed file, the underlying physical storage layer for TFile.
  * BCFile provides the basic block level compression for the data block and meta
  * blocks. It is separated from TFile as it may be used for other
@@ -102,7 +102,7 @@ final class DTBCFile {
     private static interface BlockRegister {
       /**
        * Register a block that is fully closed.
-       * 
+       *
        * @param raw
        *          The size of block in terms of uncompressed bytes.
        * @param offsetStart
@@ -156,7 +156,7 @@ final class DTBCFile {
 
       /**
        * Get the output stream for BlockAppender's consumption.
-       * 
+       *
        * @return the output stream suitable for writing block data.
        */
       OutputStream getOutputStream() {
@@ -165,7 +165,7 @@ final class DTBCFile {
 
       /**
        * Get the current position in file.
-       * 
+       *
        * @return The current byte offset in underlying file.
        * @throws IOException
        */
@@ -179,7 +179,7 @@ final class DTBCFile {
 
       /**
        * Current size of compressed data.
-       * 
+       *
        * @return
        * @throws IOException
        */
@@ -206,7 +206,7 @@ final class DTBCFile {
 
     /**
      * Access point to stuff data into a block.
-     * 
+     *
      * TODO: Change DataOutputStream to something else that tracks the size as
      * long instead of int. Currently, we will wrap around if the row block size
      * is greater than 4GB.
@@ -219,7 +219,7 @@ final class DTBCFile {
 
       /**
        * Constructor
-       * 
+       *
        * @param register
        *          the block register, which is called when the block is closed.
        * @param wbs
@@ -233,7 +233,7 @@ final class DTBCFile {
 
       /**
        * Get the raw size of the block.
-       * 
+       *
        * @return the number of uncompressed bytes written through the
        *         BlockAppender so far.
        * @throws IOException
@@ -248,7 +248,7 @@ final class DTBCFile {
 
       /**
        * Get the compressed size of the block in progress.
-       * 
+       *
        * @return the number of compressed bytes written to the underlying FS
        *         file. The size may be smaller than actual need to compress the
        *         all data written due to internal buffering inside the
@@ -289,7 +289,7 @@ final class DTBCFile {
 
     /**
      * Constructor
-     * 
+     *
      * @param fout
      *          FS output stream.
      * @param compressionName
@@ -383,7 +383,7 @@ final class DTBCFile {
      * block. There can only be one BlockAppender stream active at any time.
      * Regular Blocks may not be created after the first Meta Blocks. The caller
      * must call BlockAppender.close() to conclude the block creation.
-     * 
+     *
      * @param name
      *          The name of the Meta Block. The name must not conflict with
      *          existing Meta Blocks.
@@ -407,7 +407,7 @@ final class DTBCFile {
      * active at any time. Regular Blocks may not be created after the first
      * Meta Blocks. The caller must call BlockAppender.close() to conclude the
      * block creation.
-     * 
+     *
      * @param name
      *          The name of the Meta Block. The name must not conflict with
      *          existing Meta Blocks.
@@ -426,7 +426,7 @@ final class DTBCFile {
      * block. There can only be one BlockAppender stream active at any time.
      * Data Blocks may not be created after the first Meta Blocks. The caller
      * must call BlockAppender.close() to conclude the block creation.
-     * 
+     *
      * @return The BlockAppender stream
      * @throws IOException
      */
@@ -474,7 +474,7 @@ final class DTBCFile {
     /**
      * Callback to make sure a data block is added to the internal list when
      * it's being closed.
-     * 
+     *
      */
     private class DataBlockRegister implements BlockRegister {
       DataBlockRegister() {
@@ -545,7 +545,7 @@ final class DTBCFile {
 
       /**
        * Get the output stream for BlockAppender's consumption.
-       * 
+       *
        * @return the output stream suitable for writing block data.
        */
       public ReusableByteArrayInputStream getInputStream() {
@@ -579,7 +579,7 @@ final class DTBCFile {
     public static class BlockReader extends DataInputStream {
       private final RBlockState rBlkState;
       private boolean closed = false;
-      
+
       private ReusableByteArrayInputStream wrappedInputStream = null;
 
       BlockReader(RBlockState rbs) {
@@ -607,7 +607,7 @@ final class DTBCFile {
 
       /**
        * Get the name of the compression algorithm used to compress the block.
-       * 
+       *
        * @return name of the compression algorithm.
        */
       public String getCompressionName() {
@@ -616,7 +616,7 @@ final class DTBCFile {
 
       /**
        * Get the uncompressed size of the block.
-       * 
+       *
        * @return uncompressed size of the block.
        */
       public long getRawSize() {
@@ -625,7 +625,7 @@ final class DTBCFile {
 
       /**
        * Get the compressed size of the block.
-       * 
+       *
        * @return compressed size of the block.
        */
       public long getCompressedSize() {
@@ -634,7 +634,7 @@ final class DTBCFile {
 
       /**
        * Get the starting position of the block in the file.
-       * 
+       *
        * @return the starting position of the block in the file.
        */
       public long getStartPos() {
@@ -646,7 +646,7 @@ final class DTBCFile {
         closed = false;
         rBlkState.renew();
       }
-      
+
       public ReusableByteArrayInputStream getBlockDataInputStream()
       {
         return wrappedInputStream;
@@ -655,7 +655,7 @@ final class DTBCFile {
 
     /**
      * Constructor
-     * 
+     *
      * @param fin
      *          FS input stream.
      * @param fileLength
@@ -696,7 +696,7 @@ final class DTBCFile {
 
     /**
      * Get the name of the default compression algorithm.
-     * 
+     *
      * @return the name of the default compression algorithm.
      */
     public String getDefaultCompressionName() {
@@ -705,7 +705,7 @@ final class DTBCFile {
 
     /**
      * Get version of BCFile file being read.
-     * 
+     *
      * @return version of BCFile file being read.
      */
     public Version getBCFileVersion() {
@@ -714,7 +714,7 @@ final class DTBCFile {
 
     /**
      * Get version of BCFile API.
-     * 
+     *
      * @return version of BCFile API.
      */
     public Version getAPIVersion() {
@@ -733,7 +733,7 @@ final class DTBCFile {
 
     /**
      * Get the number of data blocks.
-     * 
+     *
      * @return the number of data blocks.
      */
     public int getBlockCount() {
@@ -742,7 +742,7 @@ final class DTBCFile {
 
     /**
      * Stream access to a Meta Block.
-     * 
+     *
      * @param name
      *          meta block name
      * @return BlockReader input stream for reading the meta block.
@@ -763,7 +763,7 @@ final class DTBCFile {
 
     /**
      * Stream access to a Data Block.
-     * 
+     *
      * @param blockIndex
      *          0-based data block index.
      * @return BlockReader input stream for reading the data block.
@@ -797,7 +797,7 @@ final class DTBCFile {
     /**
      * Find the smallest Block index whose starting offset is greater than or
      * equal to the specified offset.
-     * 
+     *
      * @param offset
      *          User-specific offset.
      * @return the index to the data Block if such block exists; or -1

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java b/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
index cb559dc..d9c483e 100644
--- a/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
+++ b/library/src/main/java/org/apache/hadoop/io/file/tfile/ReusableByteArrayInputStream.java
@@ -28,9 +28,9 @@ import java.io.ByteArrayInputStream;
  */
 public class ReusableByteArrayInputStream extends ByteArrayInputStream
 {
-  
+
   private final int initialOffset;
-  
+
   private final int initialLength;
 
   public ReusableByteArrayInputStream(byte[] buf, int offset, int length)
@@ -53,12 +53,12 @@ public class ReusableByteArrayInputStream extends ByteArrayInputStream
     count = initialLength;
     mark = 0;
   }
-  
+
   public int getPos()
   {
     return pos;
   }
-  
+
   public byte[] getBuf()
   {
     return buf;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java b/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
index 7f3061b..46222b1 100644
--- a/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
+++ b/library/src/test/java/com/datatorrent/lib/algo/BottomNUnifierTest.java
@@ -32,13 +32,13 @@ public class BottomNUnifierTest
   @Test
   public void testUnifier()
   {
-    
+
     // Instantiate unifier
     BottomNUnifier<String, Integer> oper = new BottomNUnifier<>();
     oper.setN(2);
     CollectorTestSink sink = new CollectorTestSink();
     oper.mergedport.setSink(sink);
-    
+
     oper.beginWindow(1);
     ArrayList<Integer> values = new ArrayList<Integer>();
     values.add(5);
@@ -53,7 +53,7 @@ public class BottomNUnifierTest
     tuple.put("a", values);
     oper.process(tuple);
     oper.endWindow();
-    
+
     Assert.assertEquals("Tuples in sink", sink.collectedTuples.size(), 1);
     tuple = (HashMap<String, ArrayList<Integer>>)sink.collectedTuples.get(0);
     values = tuple.get("a");

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java b/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
index dd19d2b..0c45542 100644
--- a/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/converter/MapToKeyValuePairConverterTest.java
@@ -30,25 +30,25 @@ import com.datatorrent.lib.util.TestUtils;
 public class MapToKeyValuePairConverterTest
 {
   @Test
-  public void MapToKeyValuePairConversion() 
+  public void MapToKeyValuePairConversion()
   {
     MapToKeyValuePairConverter<String, Integer> testop = new MapToKeyValuePairConverter<String, Integer>();
     Integer[] values = {1, 2, 3};
     String[] keys = {"a", "b", "c"};
-    
+
     HashMap<String, Integer> inputMap = new HashMap<String, Integer>();
 
     for (int i = 0; i < 3; i++) {
       inputMap.put(keys[i], values[i]);
     }
-    
-    CollectorTestSink<KeyValPair<String, Integer>> testsink = new CollectorTestSink<KeyValPair<String, Integer>>();    
+
+    CollectorTestSink<KeyValPair<String, Integer>> testsink = new CollectorTestSink<KeyValPair<String, Integer>>();
     TestUtils.setSink(testop.output, testsink);
-    
+
     testop.beginWindow(0);
-    
+
     testop.input.put(inputMap);
-    
+
     testop.endWindow();
 
     Assert.assertEquals(3,testsink.collectedTuples.size());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java b/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
index 8a5eed2..22e9f72 100644
--- a/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/converter/StringValueToNumberConverterForMapTest.java
@@ -31,25 +31,25 @@ public class StringValueToNumberConverterForMapTest
 {
 
   @Test
-  public void testStringValueToNumericConversion() 
+  public void testStringValueToNumericConversion()
   {
     StringValueToNumberConverterForMap<String> testop = new StringValueToNumberConverterForMap<String>();
     String[] values = {"1.0", "2.0", "3.0"};
     String[] keys = {"a", "b", "c"};
-    
+
     HashMap<String, String> inputMap = new HashMap<String, String>();
 
     for (int i = 0; i < 3; i++) {
       inputMap.put(keys[i], values[i]);
     }
-    
-    CollectorTestSink<Map<String, Number>> testsink = new CollectorTestSink<Map<String, Number>>();    
+
+    CollectorTestSink<Map<String, Number>> testsink = new CollectorTestSink<Map<String, Number>>();
     TestUtils.setSink(testop.output, testsink);
-    
+
     testop.beginWindow(0);
-    
+
     testop.input.put(inputMap);
-    
+
     testop.endWindow();
 
     Assert.assertEquals(1,testsink.collectedTuples.size());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/db/cache/CacheStoreTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/cache/CacheStoreTest.java b/library/src/test/java/com/datatorrent/lib/db/cache/CacheStoreTest.java
index 335418a..61464dd 100644
--- a/library/src/test/java/com/datatorrent/lib/db/cache/CacheStoreTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/cache/CacheStoreTest.java
@@ -33,7 +33,7 @@ public class CacheStoreTest
   public void CacheStoreTest()
   {
     final Map<Object, Object> backupMap = Maps.newHashMap();
-    
+
     backupMap.put(1, "one");
     backupMap.put(2, "two");
     backupMap.put(3, "three");
@@ -44,7 +44,7 @@ public class CacheStoreTest
     backupMap.put(8, "eight");
     backupMap.put(9, "nine");
     backupMap.put(10, "ten");
-  
+
     CacheStore cs = new CacheStore();
     cs.setMaxCacheSize(5);
     try {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcIOAppTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcIOAppTest.java b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcIOAppTest.java
index 908f02f..5c398d2 100644
--- a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcIOAppTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcIOAppTest.java
@@ -121,7 +121,7 @@ public class JdbcIOAppTest
       lma.prepareDAG(new JdbcIOApp(), conf);
       LocalMode.Controller lc = lma.getController();
       lc.runAsync();
-      // wait for records to be added to table    
+      // wait for records to be added to table
       Thread.sleep(3000);
       lc.shutdown();
       Assert.assertEquals("Events in store", 10, getNumOfEventsInStore());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcOperatorTest.java b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcOperatorTest.java
index 1ffe256..ac17c2f 100644
--- a/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/db/jdbc/JdbcOperatorTest.java
@@ -121,12 +121,12 @@ public class JdbcOperatorTest
     {
       this.startTimestamp = startTimestamp;
     }
-  
+
     public double getScore()
     {
       return score;
     }
-  
+
     public void setScore(double score)
     {
       this.score = score;
@@ -225,7 +225,7 @@ public class JdbcOperatorTest
         pStmt.setDouble(6, new Double(55.4));
         pStmt.executeUpdate();
       }
-     
+
     } catch (SQLException e) {
       throw new RuntimeException(e);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/filter/FilterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/filter/FilterTest.java b/library/src/test/java/com/datatorrent/lib/filter/FilterTest.java
index 1fe6484..567e27a 100644
--- a/library/src/test/java/com/datatorrent/lib/filter/FilterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/filter/FilterTest.java
@@ -175,7 +175,7 @@ public class FilterTest
 
     clearFilterOperator();
   }
-  
+
   @Test
   public void testOptionalExpressionFunctions()
   {
@@ -183,7 +183,7 @@ public class FilterTest
     prepareFilterOperator(DummyPublicPOJO.class, "({$}.val == 1)");
     Assert.assertEquals(6, filter.getExpressionFunctions().size());
   }
-  
+
   @Test
   public void testSetOptionalExpressionFunctionsItem()
   {
@@ -191,8 +191,8 @@ public class FilterTest
     prepareFilterOperator(DummyPublicPOJO.class, "({$}.val == 1)");
     Assert.assertEquals(6, filter.getExpressionFunctions().size());
   }
-  
-  
+
+
   @Before
   public void setup()
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java b/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
index bb51ca4..c3e2cde 100644
--- a/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/formatter/XmlFormatterTest.java
@@ -173,7 +173,7 @@ public class XmlFormatterTest
         + "</EmployeeBean>";
     Assert.assertEquals(expected, validDataSink.collectedTuples.get(0));
   }
-  
+
   public static class DateAdapter extends XmlAdapter<String, Date>
   {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
index e1f23d1..2f926d3 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileInputOperatorTest.java
@@ -88,7 +88,7 @@ public class AbstractFileInputOperatorTest
 
   @Rule
   public TestMeta testMeta = new TestMeta();
-  
+
   @Test
   public void testSinglePartiton() throws Exception
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
index 0fff870..03f3bf6 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperatorTest.java
@@ -1422,7 +1422,7 @@ public class AbstractFileOutputOperatorTest
 
     Assert.assertEquals("Max length validation not thrown with -1 max length", true, error);
   }
-  
+
   @Test
   public void testPeriodicRotation()
   {
@@ -1439,7 +1439,7 @@ public class AbstractFileOutputOperatorTest
       for (int j = 0; j < i; ++j) {
         writer.input.put(2 * j + 1);
       }
-      writer.endWindow();      
+      writer.endWindow();
     }
     writer.committed(29);
     Set<String> fileNames = new TreeSet<String>();
@@ -1543,7 +1543,7 @@ public class AbstractFileOutputOperatorTest
     // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4691425
     List<Long> evenOffsets = new ArrayList<Long>();
     List<Long> oddOffsets = new ArrayList<Long>();
-    
+
     writer.setFilePath(testMeta.getDir());
     writer.setAlwaysWriteToTmp(false);
     writer.setup(testMeta.testOperatorContext);
@@ -1633,7 +1633,7 @@ public class AbstractFileOutputOperatorTest
         throw new RuntimeException(e);
       }
     }
-    
+
     int numWindows = 0;
     try {
       fis = new FileInputStream(file);
@@ -1651,7 +1651,7 @@ public class AbstractFileOutputOperatorTest
           throw new RuntimeException(e);
         }
       }
-      
+
       long startOffset = 0;
       for (long offset : offsets) {
         // Skip initial case in case file is not yet created
@@ -1792,8 +1792,8 @@ public class AbstractFileOutputOperatorTest
     {
       counterStream = new CounterFilterOutputStream(outputStream);
     }
-    
-    public boolean isDoInit() 
+
+    public boolean isDoInit()
     {
       return (counterStream == null);
     }
@@ -1809,7 +1809,7 @@ public class AbstractFileOutputOperatorTest
     {
 
     }
-    
+
     public long getCounter()
     {
       if (isDoInit()) {
@@ -1817,10 +1817,10 @@ public class AbstractFileOutputOperatorTest
       } else {
         return counterStream.getCounter();
       }
-      
+
     }
   }
-  
+
   private static class CounterFilterOutputStream extends FilterOutputStream
   {
     long counter;
@@ -1830,7 +1830,7 @@ public class AbstractFileOutputOperatorTest
     {
       super(out);
     }
-    
+
     @Override
     public void write(int b) throws IOException
     {
@@ -1869,5 +1869,5 @@ public class AbstractFileOutputOperatorTest
       return counter;
     }
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
index e5193b6..17febf6 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperatorTest.java
@@ -173,7 +173,7 @@ public class AbstractSingleFileOutputOperatorTest
   {
     writer.setOutputFileName(SINGLE_FILE);
     writer.setPartitionedFileNameformat("");
-    
+
     File meta = new File(testMeta.getDir());
     writer.setFilePath(meta.getAbsolutePath());
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/FastMergerDecisionMakerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/FastMergerDecisionMakerTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/FastMergerDecisionMakerTest.java
index e1f57d9..e0ca9b6 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/FastMergerDecisionMakerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/FastMergerDecisionMakerTest.java
@@ -74,7 +74,7 @@ public class FastMergerDecisionMakerTest
 
   /**
    * If some block is missing then expect BlockNotFoundException.
-   * 
+   *
    * @throws IOException
    * @throws BlockNotFoundException
    */
@@ -111,7 +111,7 @@ public class FastMergerDecisionMakerTest
   /**
    * All blocks are of same size which is same as default blockSize. Then fast
    * merge is possible
-   * 
+   *
    * @throws IOException
    * @throws BlockNotFoundException
    */
@@ -126,7 +126,7 @@ public class FastMergerDecisionMakerTest
    * All blocks (except last block)are of same size which is same as default
    * blockSize. Last block is smaller than default blockSize Then fast merge is
    * possible
-   * 
+   *
    * @throws IOException
    * @throws BlockNotFoundException
    */
@@ -141,7 +141,7 @@ public class FastMergerDecisionMakerTest
   /**
    * Some block other than last block is of different size. Then fast merge is
    * not possible
-   * 
+   *
    * @throws IOException
    * @throws BlockNotFoundException
    */
@@ -156,7 +156,7 @@ public class FastMergerDecisionMakerTest
   /**
    * Some block other than last block is of different size. Then fast merge is
    * not possible
-   * 
+   *
    * @throws IOException
    * @throws BlockNotFoundException
    */
@@ -171,7 +171,7 @@ public class FastMergerDecisionMakerTest
   /**
    * Some block other than last block is of different size. Then fast merge is
    * not possible
-   * 
+   *
    * @throws IOException
    * @throws BlockNotFoundException
    */


[4/6] apex-malhar git commit: Fix trailing whitespace.

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
----------------------------------------------------------------------
diff --git a/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java b/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
index 8e12fcc..21079d7 100644
--- a/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
+++ b/demos/pi/src/test/java/com/datatorrent/demos/pi/CalculatorTest.java
@@ -30,7 +30,7 @@ public class CalculatorTest
 {
   @Test
   public void testSomeMethod() throws Exception
-  { 
+  {
     LocalMode lma = LocalMode.newInstance();
     Configuration conf = new Configuration(false);
     conf.addResource("dt-site-pilibrary.xml");

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/hive/src/main/java/com/datatorrent/contrib/hive/AbstractFSRollingOutputOperator.java
----------------------------------------------------------------------
diff --git a/hive/src/main/java/com/datatorrent/contrib/hive/AbstractFSRollingOutputOperator.java b/hive/src/main/java/com/datatorrent/contrib/hive/AbstractFSRollingOutputOperator.java
index 6360768..3c9c4da 100755
--- a/hive/src/main/java/com/datatorrent/contrib/hive/AbstractFSRollingOutputOperator.java
+++ b/hive/src/main/java/com/datatorrent/contrib/hive/AbstractFSRollingOutputOperator.java
@@ -210,7 +210,7 @@ public abstract class AbstractFSRollingOutputOperator<T> extends AbstractFileOut
    * written to. Example: If hive partitions are date='2014-12-12',country='USA'
    * then this method returns {"2014-12-12","USA"} The implementation is left to
    * the user.
-   * 
+   *
    * @param tuple
    *          A received tuple to be written to a hive partition.
    * @return ArrayList containing hive partition values.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/hive/src/main/java/com/datatorrent/contrib/hive/HiveOperator.java
----------------------------------------------------------------------
diff --git a/hive/src/main/java/com/datatorrent/contrib/hive/HiveOperator.java b/hive/src/main/java/com/datatorrent/contrib/hive/HiveOperator.java
index 8e3b143..ed4ca85 100755
--- a/hive/src/main/java/com/datatorrent/contrib/hive/HiveOperator.java
+++ b/hive/src/main/java/com/datatorrent/contrib/hive/HiveOperator.java
@@ -61,7 +61,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Hive store.
-   * 
+   *
    * @deprecated use {@link AbstractStoreOutputOperator#store} instead
    */
   @Deprecated
@@ -226,7 +226,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Get the partition columns in hive to which data needs to be loaded.
-   * 
+   *
    * @return List of Hive Partition Columns
    */
   public ArrayList<String> getHivePartitionColumns()
@@ -236,7 +236,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Set the hive partition columns to which data needs to be loaded.
-   * 
+   *
    * @param hivePartitionColumns
    */
   public void setHivePartitionColumns(ArrayList<String> hivePartitionColumns)
@@ -246,7 +246,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Get the table name in hive.
-   * 
+   *
    * @return table name
    */
   public String getTablename()
@@ -256,7 +256,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Set the table name in hive.
-   * 
+   *
    * @param tablename
    */
   public void setTablename(String tablename)
@@ -266,7 +266,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Gets the store set for hive;
-   * 
+   *
    * @deprecated use {@link #getStore()} instead.
    * @return hive store
    */
@@ -278,7 +278,7 @@ public class HiveOperator extends AbstractStoreOutputOperator<FilePartitionMappi
 
   /**
    * Set the store in hive.
-   * 
+   *
    * @deprecated use {@link #setStore()} instead.
    * @param hivestore
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/hive/src/main/java/org/apache/apex/malhar/hive/HiveOutputModule.java
----------------------------------------------------------------------
diff --git a/hive/src/main/java/org/apache/apex/malhar/hive/HiveOutputModule.java b/hive/src/main/java/org/apache/apex/malhar/hive/HiveOutputModule.java
index 3491b3c..d859634 100644
--- a/hive/src/main/java/org/apache/apex/malhar/hive/HiveOutputModule.java
+++ b/hive/src/main/java/org/apache/apex/malhar/hive/HiveOutputModule.java
@@ -164,7 +164,7 @@ public class HiveOutputModule implements Module
 
   /**
    * The path of the directory to where files are written.
-   * 
+   *
    * @return file path
    */
   public String getFilePath()
@@ -174,7 +174,7 @@ public class HiveOutputModule implements Module
 
   /**
    * The path of the directory to where files are written.
-   * 
+   *
    * @param filePath
    *          file path
    */
@@ -185,7 +185,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Names of the columns in hive table (excluding partitioning columns).
-   * 
+   *
    * @return Hive column names
    */
   public String[] getHiveColumns()
@@ -195,7 +195,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Names of the columns in hive table (excluding partitioning columns).
-   * 
+   *
    * @param hiveColumns
    *          Hive column names
    */
@@ -207,7 +207,7 @@ public class HiveOutputModule implements Module
   /**
    * Data types of the columns in hive table (excluding partitioning columns).
    * This sequence should match to the fields in hiveColumnDataTypes
-   * 
+   *
    * @return Hive column data types
    */
   public FIELD_TYPE[] getHiveColumnDataTypes()
@@ -218,7 +218,7 @@ public class HiveOutputModule implements Module
   /**
    * Data types of the columns in hive table (excluding partitioning columns).
    * This sequence should match to the fields in hiveColumnDataTypes *
-   * 
+   *
    * @param hiveColumnDataTypes
    *          Hive column data types
    */
@@ -230,7 +230,7 @@ public class HiveOutputModule implements Module
   /**
    * Expressions for the hive columns (excluding partitioning columns). This
    * sequence should match to the fields in hiveColumnDataTypes
-   * 
+   *
    * @return
    */
   public String[] getExpressionsForHiveColumns()
@@ -241,7 +241,7 @@ public class HiveOutputModule implements Module
   /**
    * Expressions for the hive columns (excluding partitioning columns). This
    * sequence should match to the fields in hiveColumnDataTypes
-   * 
+   *
    * @param expressionsForHiveColumns
    */
   public void setExpressionsForHiveColumns(String[] expressionsForHiveColumns)
@@ -251,7 +251,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Names of the columns on which hive data should be partitioned
-   * 
+   *
    * @return hive partition columns
    */
   public String[] getHivePartitionColumns()
@@ -261,7 +261,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Names of the columns on which hive data should be partitioned
-   * 
+   *
    * @param hivePartitionColumns
    *          Hive partition columns
    */
@@ -273,7 +273,7 @@ public class HiveOutputModule implements Module
   /**
    * Data types of the columns on which hive data should be partitioned. This
    * sequence should match to the fields in hivePartitionColumns
-   * 
+   *
    * @return Hive partition column data types
    */
   public FIELD_TYPE[] getHivePartitionColumnDataTypes()
@@ -284,7 +284,7 @@ public class HiveOutputModule implements Module
   /**
    * Data types of the columns on which hive data should be partitioned. This
    * sequence should match to the fields in hivePartitionColumns
-   * 
+   *
    * @param hivePartitionColumnDataTypes
    *          Hive partition column data types
    */
@@ -296,7 +296,7 @@ public class HiveOutputModule implements Module
   /**
    * Expressions for the hive partition columns. This sequence should match to
    * the fields in hivePartitionColumns
-   * 
+   *
    * @return Expressions for hive partition columns
    */
   public String[] getExpressionsForHivePartitionColumns()
@@ -307,7 +307,7 @@ public class HiveOutputModule implements Module
   /**
    * Expressions for the hive partition columns. This sequence should match to
    * the fields in hivePartitionColumns
-   * 
+   *
    * @param expressionsForHivePartitionColumns
    *          Expressions for hive partition columns
    */
@@ -318,7 +318,7 @@ public class HiveOutputModule implements Module
 
   /**
    * The maximum length in bytes of a rolling file.
-   * 
+   *
    * @return maximum size of file
    */
   public Long getMaxLength()
@@ -328,7 +328,7 @@ public class HiveOutputModule implements Module
 
   /**
    * The maximum length in bytes of a rolling file.
-   * 
+   *
    * @param maxLength
    *          maximum size of file
    */
@@ -339,7 +339,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Connection URL for connecting to hive.
-   * 
+   *
    * @return database url
    */
   public String getDatabaseUrl()
@@ -349,7 +349,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Connection URL for connecting to hive.
-   * 
+   *
    * @param databaseUrl
    *          database url
    */
@@ -360,7 +360,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Driver for connecting to hive.
-   * 
+   *
    * @return database driver
    */
   public String getDatabaseDriver()
@@ -370,7 +370,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Driver for connecting to hive.
-   * 
+   *
    * @param databaseDriver
    *          database driver
    */
@@ -381,7 +381,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Username for connecting to hive
-   * 
+   *
    * @return user name
    */
   public String getUserName()
@@ -391,7 +391,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Username for connecting to hive
-   * 
+   *
    * @param username
    *          user name
    */
@@ -402,7 +402,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Password for connecting to hive
-   * 
+   *
    * @return password
    */
   public String getPassword()
@@ -412,7 +412,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Password for connecting to hive
-   * 
+   *
    * @param password
    *          password
    */
@@ -423,7 +423,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Table name for writing data into hive
-   * 
+   *
    * @return table name
    */
   public String getTablename()
@@ -433,7 +433,7 @@ public class HiveOutputModule implements Module
 
   /**
    * Table name for writing data into hive
-   * 
+   *
    * @param tablename
    *          table name
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/kafka/src/main/java/org/apache/apex/malhar/kafka/AbstractKafkaPartitioner.java
----------------------------------------------------------------------
diff --git a/kafka/src/main/java/org/apache/apex/malhar/kafka/AbstractKafkaPartitioner.java b/kafka/src/main/java/org/apache/apex/malhar/kafka/AbstractKafkaPartitioner.java
index 772399d..ad5c3fa 100644
--- a/kafka/src/main/java/org/apache/apex/malhar/kafka/AbstractKafkaPartitioner.java
+++ b/kafka/src/main/java/org/apache/apex/malhar/kafka/AbstractKafkaPartitioner.java
@@ -110,20 +110,20 @@ public abstract class AbstractKafkaPartitioner implements Partitioner<AbstractKa
                 metadata.get(clusters[i]).put(topic, ptis);
                 break;
               }
-              
+
               logger.warn("Partition metadata for topic {} is null. retrying...", topic);
-              
+
             } catch (Exception e) {
               logger.warn("Got Exception when trying get partition info for topic {}.", topic, e);
             }
-  
+
             try {
               Thread.sleep(100);
             } catch (Exception e1) {
               //ignore
             }
           } //end while
-          
+
           if (tryTime == 0) {
             throw new RuntimeException("Get partition info for topic completely failed. Please check the log file. topic name: " + topic);
           }
@@ -183,8 +183,8 @@ public abstract class AbstractKafkaPartitioner implements Partitioner<AbstractKa
     }
     metadataRefreshClients = null;
   }
-  
-  
+
+
   @Override
   public void partitioned(Map<Integer, Partition<AbstractKafkaInputOperator>> map)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/kafka/src/main/java/org/apache/apex/malhar/kafka/KafkaConsumerWrapper.java
----------------------------------------------------------------------
diff --git a/kafka/src/main/java/org/apache/apex/malhar/kafka/KafkaConsumerWrapper.java b/kafka/src/main/java/org/apache/apex/malhar/kafka/KafkaConsumerWrapper.java
index 143a5bd..fa4856e 100644
--- a/kafka/src/main/java/org/apache/apex/malhar/kafka/KafkaConsumerWrapper.java
+++ b/kafka/src/main/java/org/apache/apex/malhar/kafka/KafkaConsumerWrapper.java
@@ -228,7 +228,7 @@ public class KafkaConsumerWrapper implements Closeable
       }
     }
   }
-  
+
   protected void handleNoOffsetForPartitionException(NoOffsetForPartitionException e, KafkaConsumer<byte[], byte[]> consumer)
   {
     // if initialOffset is set to EARLIST or LATEST
@@ -244,7 +244,7 @@ public class KafkaConsumerWrapper implements Closeable
     } else {
       consumer.seekToEnd(e.partitions().toArray(new TopicPartition[0]));
     }
-  
+
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaInputOperatorTest.java b/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaInputOperatorTest.java
index 8440615..4e97d72 100644
--- a/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaInputOperatorTest.java
+++ b/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaInputOperatorTest.java
@@ -66,7 +66,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
   private String partition = null;
 
   private String testName = "";
-  
+
   public static String APPLICATION_PATH = baseDir + File.separator + StramLocalCluster.class.getName() + File.separator;
 
   public class KafkaTestInfo extends TestWatcher
@@ -86,11 +86,11 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
       this.desc = description;
     }
   }
-  
+
   @Rule
   public final KafkaTestInfo testInfo = new KafkaTestInfo();
-  
-  
+
+
   @Parameterized.Parameters(name = "multi-cluster: {0}, multi-partition: {1}, partition: {2}")
   public static Collection<Object[]> testScenario()
   {
@@ -116,7 +116,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     tupleCollection.clear();
     //reset count for next new test case
     k = 0;
-    
+
     createTopic(0, testName);
     if (hasMultiCluster) {
       createTopic(1, testName);
@@ -146,14 +146,14 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
   private static final int failureTrigger = 3 * scale;
   private static final int tuplesPerWindow = 5 * scale;
   private static final int waitTime = 60000 + 300 * scale;
-  
-  //This latch was used to count the END_TUPLE, but the order of tuple can't be guaranteed, 
+
+  //This latch was used to count the END_TUPLE, but the order of tuple can't be guaranteed,
   //so, count valid tuple instead.
   private static CountDownLatch latch;
   private static boolean hasFailure = false;
   private static int k = 0;
   private static Thread monitorThread;
-  
+
   /**
    * Test Operator to collect tuples from KafkaSingleInputStringOperator.
    *
@@ -179,7 +179,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     transient List<String> windowTupleCollector = Lists.newArrayList();
     private transient Map<String, List<String>> tupleCollectedInWindow = new HashMap<>();
     private int endTuples = 0;
-    
+
     @Override
     public void setup(Context.OperatorContext context)
     {
@@ -196,7 +196,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
       endTuples = 0;
     }
 
-    
+
     public void processTuple(byte[] bt)
     {
       String tuple = new String(bt);
@@ -208,10 +208,10 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
       if (tuple.startsWith(KafkaOperatorTestBase.END_TUPLE)) {
         endTuples++;
       }
-      
+
       windowTupleCollector.add(tuple);
     }
-    
+
     @Override
     public void endWindow()
     {
@@ -231,7 +231,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
       //discard the tuples of this window if except happened
       int tupleSize = windowTupleCollector.size();
       tupleCollection.addAll(windowTupleCollector);
-      
+
       int countDownTupleSize = countDownAll ? tupleSize : endTuples;
 
       if (latch != null) {
@@ -303,8 +303,8 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     // each broker should get a END_TUPLE message
     latch = new CountDownLatch(countDownAll ? totalCount + totalBrokers : totalBrokers);
 
-    logger.info("Test Case: name: {}; totalBrokers: {}; hasFailure: {}; hasMultiCluster: {}; hasMultiPartition: {}, partition: {}", 
-        testName, totalBrokers, hasFailure, hasMultiCluster, hasMultiPartition, partition); 
+    logger.info("Test Case: name: {}; totalBrokers: {}; hasFailure: {}; hasMultiCluster: {}; hasMultiPartition: {}, partition: {}",
+        testName, totalBrokers, hasFailure, hasMultiCluster, hasMultiPartition, partition);
 
     // Start producer
     KafkaTestProducer p = new KafkaTestProducer(testName, hasMultiPartition, hasMultiCluster);
@@ -313,7 +313,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     t.start();
 
     int expectedReceiveCount = totalCount + totalBrokers;
-    
+
     // Create DAG for testing.
     LocalMode lma = LocalMode.newInstance();
     DAG dag = lma.getDAG();
@@ -346,7 +346,7 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     LocalMode.Controller lc = lma.getController();
     lc.setHeartbeatMonitoringEnabled(false);
 
-    //let the Controller to run the inside another thread. It is almost same as call Controller.runAsync(), 
+    //let the Controller to run the inside another thread. It is almost same as call Controller.runAsync(),
     //but Controller.runAsync() don't expose the thread which run it, so we don't know when the thread will be terminated.
     //create this thread and then call join() to make sure the Controller shutdown completely.
     monitorThread = new Thread((StramLocalCluster)lc, "master");
@@ -363,9 +363,9 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     } catch (Exception e) {
       logger.warn(e.getMessage());
     }
-    
+
     t.join();
-    
+
     if (!notTimeout || expectedReceiveCount != tupleCollection.size()) {
       logger.info("Number of received/expected tuples: {}/{}, testName: {}, tuples: \n{}", tupleCollection.size(),
           expectedReceiveCount, testName, tupleCollection);
@@ -373,13 +373,13 @@ public class KafkaInputOperatorTest extends KafkaOperatorTestBase
     Assert.assertTrue("TIMEOUT. testName: " + this.testName + "; Collected data: " + tupleCollection, notTimeout);
 
     // Check results
-    Assert.assertTrue( "testName: " + testName + "; Collected tuple size: " + tupleCollection.size() + "; Expected tuple size: " + expectedReceiveCount + "; data: \n" + tupleCollection, 
+    Assert.assertTrue( "testName: " + testName + "; Collected tuple size: " + tupleCollection.size() + "; Expected tuple size: " + expectedReceiveCount + "; data: \n" + tupleCollection,
         expectedReceiveCount == tupleCollection.size());
-    
+
     logger.info("End of test case: {}", testName);
   }
 
-  
+
   private void setupHasFailureTest(KafkaSinglePortInputOperator operator, DAG dag)
   {
     operator.setHoldingBufferSize(5000);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestPartitioner.java
----------------------------------------------------------------------
diff --git a/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestPartitioner.java b/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestPartitioner.java
index e6256f1..21f8977 100644
--- a/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestPartitioner.java
+++ b/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestPartitioner.java
@@ -34,7 +34,7 @@ import kafka.utils.VerifiableProperties;
 public class KafkaTestPartitioner implements Partitioner
 {
   public KafkaTestPartitioner(VerifiableProperties props) {
-    
+
   }
 
   public KafkaTestPartitioner() {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestProducer.java
----------------------------------------------------------------------
diff --git a/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestProducer.java b/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestProducer.java
index 2f24a8a..ca6cc98 100644
--- a/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestProducer.java
+++ b/kafka/src/test/java/org/apache/apex/malhar/kafka/KafkaTestProducer.java
@@ -106,7 +106,7 @@ public class KafkaTestProducer implements Runnable
   }
 
   private transient List<Future<RecordMetadata>> sendTasks = Lists.newArrayList();
-  
+
   private void generateMessages()
   {
     // Create dummy message
@@ -140,12 +140,12 @@ public class KafkaTestProducer implements Runnable
         sendTasks.add(producer.send(new ProducerRecord<>(topic, "", msg)));
       }
     }
-    
+
     producer.flush();
     if (producer1!=null) {
       producer1.flush();
     }
-    
+
     try {
       for (Future<RecordMetadata> task : sendTasks) {
         task.get(30, TimeUnit.SECONDS);
@@ -153,7 +153,7 @@ public class KafkaTestProducer implements Runnable
     } catch (Exception e) {
       throw new RuntimeException(e);
     }
-    
+
     close();
   }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeMapPrimitive.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeMapPrimitive.java b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeMapPrimitive.java
index cd61e20..8c8b83a 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeMapPrimitive.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/gpo/SerdeMapPrimitive.java
@@ -74,7 +74,7 @@ public class SerdeMapPrimitive  implements Serde
     GPOType gpoType = GPOType.GPO_TYPE_ARRAY[type.ordinal()];
     bytes.add(gpoType.serialize(object));
   }
-  
+
   @Override
   public synchronized Object deserializeObject(byte[] objectBytes, MutableInt offset)
   {
@@ -87,7 +87,7 @@ public class SerdeMapPrimitive  implements Serde
       int typeOrdinal = GPOUtils.deserializeInt(objectBytes, offset);
       GPOType gpoType = GPOType.GPO_TYPE_ARRAY[typeOrdinal];
       Object key = gpoType.deserialize(objectBytes, offset);
-      
+
       typeOrdinal = GPOUtils.deserializeInt(objectBytes, offset);
       gpoType = GPOType.GPO_TYPE_ARRAY[typeOrdinal];
       Object value = gpoType.deserialize(objectBytes, offset);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java b/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
index 83e8634..4d631c3 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/query/WindowBoundedService.java
@@ -104,7 +104,7 @@ public class WindowBoundedService implements Component<OperatorContext>
     mutex.release();
 
     executorThread.shutdown();
-    
+
     try {
       executorThread.awaitTermination(10000L + executeIntervalMillis, TimeUnit.MILLISECONDS);
     } catch (InterruptedException ex) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
index 2333dbb..59625f9 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalConfigurationSchema.java
@@ -2201,7 +2201,7 @@ public class DimensionalConfigurationSchema
   {
     return getDimensionsDescriptorIDToIncrementalAggregatorIDs();
   }
-  
+
   public List<IntArrayList> getDimensionsDescriptorIDToCompositeAggregatorIDs()
   {
     return dimensionsDescriptorIDToCompositeAggregatorIDs;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
index 6138971..4fef2df 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/DimensionalSchema.java
@@ -100,7 +100,7 @@ public class DimensionalSchema implements Schema
       new Fields(Sets.newHashSet(FIELD_TIME_FROM, FIELD_TIME_TO)));
 
   public static final String FIELD_RESPONSE_DELAY_MILLS = "responseDelayMillis";
-  
+
   /**
    * The from value for the schema. Null if there is no from value.
    */
@@ -164,7 +164,7 @@ public class DimensionalSchema implements Schema
   private int schemaID = Schema.DEFAULT_SCHEMA_ID;
 
   protected long responseDelayMillis;
-  
+
   /**
    * Constructor for serialization
    */
@@ -249,7 +249,7 @@ public class DimensionalSchema implements Schema
       long responseDelayMillis)
   {
     this(schemaStub,
-        configurationSchema, 
+        configurationSchema,
         responseDelayMillis);
     this.schemaID = schemaID;
   }
@@ -391,7 +391,7 @@ public class DimensionalSchema implements Schema
 
     schema.put(SnapshotSchema.FIELD_SCHEMA_TYPE, DimensionalSchema.SCHEMA_TYPE);
     schema.put(SnapshotSchema.FIELD_SCHEMA_VERSION, DimensionalSchema.SCHEMA_VERSION);
-    
+
     //responseDelayMillis
     if (responseDelayMillis > 0) {
       schema.put(FIELD_RESPONSE_DELAY_MILLS, responseDelayMillis);
@@ -459,10 +459,10 @@ public class DimensionalSchema implements Schema
     for (int combinationID = 0;
         combinationID < configurationSchema.getDimensionsDescriptorIDToKeys().size();
         combinationID++) {
-      
+
       //TODO: the auto-generated combination for computation of composite aggregator will be added.
       //should remove it.
-      
+
       Fields fields = configurationSchema.getDimensionsDescriptorIDToKeys().get(combinationID);
       Map<String, Set<String>> fieldToAggregatorAdditionalValues =
           configurationSchema.getDimensionsDescriptorIDToFieldToAggregatorAdditionalValues().get(combinationID);
@@ -515,7 +515,7 @@ public class DimensionalSchema implements Schema
 
         combination.put(DimensionalConfigurationSchema.FIELD_DIMENSIONS_ADDITIONAL_VALUES, additionalValueArray);
       }
-      
+
       dimensions.put(combination);
     }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
index 8260c81..4460b51 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/Schema.java
@@ -34,7 +34,7 @@ public interface Schema
   public static final String FIELD_SCHEMA_KEYS = "schemaKeys";
   public static final String FIELD_SCHEMA = "schema";
   public static final String FIELD_SCHEMA_TAGS = "tags";
-  
+
   /**
    * The id of the schema. This is relevant for operators which support serving multiple schemas,
    * in which each schema will need a unique id.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
index 5010580..b1e6d36 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/schemas/SnapshotSchema.java
@@ -245,7 +245,7 @@ public class SnapshotSchema implements Schema
 
     schemaJSON = schema.toString();
   }
-  
+
   /**
    * This is a helper method which sets the JSON that represents this schema.
    * @param schemaJSON The JSON that represents this schema.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
index 0b03e79..19e142b 100644
--- a/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
+++ b/library/src/main/java/com/datatorrent/lib/appdata/snapshot/AbstractAppDataSnapshotServer.java
@@ -107,9 +107,9 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
    * The queryExecutor execute the query and return the result.
    */
   protected QueryExecutor<Query, Void, MutableLong, Result> queryExecutor;
-   
+
   private Set<String> tags;
-  
+
   @AppData.QueryPort
   @InputPortFieldAnnotation(optional = true)
   public final transient DefaultInputPort<String> query = new DefaultInputPort<String>()
@@ -120,7 +120,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
       processQuery(queryJSON);
     }
   };
-  
+
   /**
    * process the query send.
    * provide this method to give sub class a chance to override.
@@ -169,7 +169,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
       currentData.add(gpoRow);
     }
   }
-  
+
   /**
    * Create operator.
    */
@@ -199,11 +199,11 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
   public void setup(OperatorContext context)
   {
     setupSchema();
-    
+
     schemaRegistry = new SchemaRegistrySingle(schema);
     //Setup for query processing
     setupQueryProcessor();
-    
+
     queryDeserializerFactory = new MessageDeserializerFactory(SchemaQuery.class,
                                                            DataQuerySnapshot.class);
     queryDeserializerFactory.setContext(DataQuerySnapshot.class, schemaRegistry);
@@ -228,7 +228,7 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
 
   protected void setupQueryProcessor()
   {
-    queryProcessor = QueryManagerSynchronous.newInstance(queryExecutor == null ? new SnapshotComputer() : queryExecutor, 
+    queryProcessor = QueryManagerSynchronous.newInstance(queryExecutor == null ? new SnapshotComputer() : queryExecutor,
         new AppDataWindowEndQueueManager<Query, Void>());
   }
 
@@ -378,6 +378,6 @@ public abstract class AbstractAppDataSnapshotServer<INPUT_EVENT> implements Oper
   {
     this.tags = tags;
   }
-  
-  
+
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/bandwidth/BandwidthPartitioner.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/bandwidth/BandwidthPartitioner.java b/library/src/main/java/com/datatorrent/lib/bandwidth/BandwidthPartitioner.java
index 2b503ed..9b9eb8d 100644
--- a/library/src/main/java/com/datatorrent/lib/bandwidth/BandwidthPartitioner.java
+++ b/library/src/main/java/com/datatorrent/lib/bandwidth/BandwidthPartitioner.java
@@ -42,7 +42,7 @@ public class BandwidthPartitioner<T extends BandwidthLimitingOperator> extends S
 
   /**
    * This constructor is used to create the partitioner from a property.
-   * 
+   *
    * @param value A string which is an integer of the number of partitions to create
    */
   public BandwidthPartitioner(String value)
@@ -52,7 +52,7 @@ public class BandwidthPartitioner<T extends BandwidthLimitingOperator> extends S
 
   /**
    * This creates a partitioner which creates partitonCount partitions.
-   * 
+   *
    * @param partitionCount The number of partitions to create.
    */
   public BandwidthPartitioner(int partitionCount)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/codec/package-info.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/codec/package-info.java b/library/src/main/java/com/datatorrent/lib/codec/package-info.java
index ded8689..d876e3f 100644
--- a/library/src/main/java/com/datatorrent/lib/codec/package-info.java
+++ b/library/src/main/java/com/datatorrent/lib/codec/package-info.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 /**
- *  Shared codec implementations. 
+ *  Shared codec implementations.
  */
 @org.apache.hadoop.classification.InterfaceStability.Evolving
 package com.datatorrent.lib.codec;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/converter/Converter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/converter/Converter.java b/library/src/main/java/com/datatorrent/lib/converter/Converter.java
index ef999e4..3799cd2 100644
--- a/library/src/main/java/com/datatorrent/lib/converter/Converter.java
+++ b/library/src/main/java/com/datatorrent/lib/converter/Converter.java
@@ -24,7 +24,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  * Operators that are converting tuples from one format to another must
  * implement this interface. Eg. Parsers or formatters , that parse data of
  * certain format and convert them to another format.
- * 
+ *
  * @param <INPUT>
  * @param <OUTPUT>
  * @since 3.2.0
@@ -35,7 +35,7 @@ public interface Converter<INPUT, OUTPUT>
   /**
    * Provide the implementation for converting tuples from one format to the
    * other
-   * 
+   *
    * @param tuple tuple of certain format
    * @return OUTPUT tuple of converted format
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/db/KeyValueStore.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/KeyValueStore.java b/library/src/main/java/com/datatorrent/lib/db/KeyValueStore.java
index 4211d3d..76759a4 100644
--- a/library/src/main/java/com/datatorrent/lib/db/KeyValueStore.java
+++ b/library/src/main/java/com/datatorrent/lib/db/KeyValueStore.java
@@ -62,7 +62,7 @@ public interface KeyValueStore extends Connectable
 
   /**
    * Removes the key and the value given the key
-   * 
+   *
    * @param key
    */
   public void remove(Object key);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPOJOOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPOJOOutputOperator.java b/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPOJOOutputOperator.java
index 90111d8..38d44a0 100644
--- a/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPOJOOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPOJOOutputOperator.java
@@ -94,7 +94,7 @@ public abstract class AbstractJdbcPOJOOutputOperator extends AbstractJdbcTransac
     super();
     columnFieldGetters = Lists.newArrayList();
   }
-  
+
   protected static class ActiveFieldInfo
   {
     final FieldInfo fieldInfo;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPollInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPollInputOperator.java b/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPollInputOperator.java
index f9fb714..6bd5121 100644
--- a/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPollInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/jdbc/AbstractJdbcPollInputOperator.java
@@ -79,11 +79,11 @@ import static org.jooq.impl.DSL.field;
  * partitions for fetching the existing data in the table. And an additional
  * single partition for polling additive data. Assumption is that there is an
  * ordered unique column using which range queries can be formed<br>
- * 
+ *
  * Only newly added data will be fetched by the polling jdbc partition, also
  * assumption is rows won't be added or deleted in middle during scan.
- * 
- * 
+ *
+ *
  * @displayName Jdbc Polling Input Operator
  * @category Input
  * @tags database, sql, jdbc, partitionable, idepotent, pollable

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPollInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPollInputOperator.java b/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPollInputOperator.java
index d139379..9a76103 100644
--- a/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPollInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPollInputOperator.java
@@ -33,7 +33,7 @@ import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 /**
  * A concrete implementation for {@link AbstractJdbcPollInputOperator} to
  * consume data from jdbc store and emit comma separated values <br>
- * 
+ *
  * @displayName Jdbc Polling Input Operator
  * @category Input
  * @tags database, sql, jdbc

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
index 7dfe4e9..f11f2bc 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileImpl.java
@@ -32,8 +32,8 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer;
 /**
  * A TFile wrapper with FileAccess API
  * <ul>
- * <li>{@link TFileImpl.DefaultTFileImpl} return default TFile {@link Reader} and {@link Writer} for IO operations</li> 
- * <li>{@link TFileImpl.DTFileImpl} return DTFile {@link org.apache.hadoop.io.file.tfile.DTFile.Reader}(which is faster than default TFile reader) and {@link Writer} for IO operations</li> 
+ * <li>{@link TFileImpl.DefaultTFileImpl} return default TFile {@link Reader} and {@link Writer} for IO operations</li>
+ * <li>{@link TFileImpl.DTFileImpl} return DTFile {@link org.apache.hadoop.io.file.tfile.DTFile.Reader}(which is faster than default TFile reader) and {@link Writer} for IO operations</li>
  * </ul>
  *
  * @since 2.0.0
@@ -44,16 +44,16 @@ public abstract class TFileImpl extends FileAccessFSImpl
   private int minBlockSize = 64 * 1024;
 
   private String compressName = TFile.COMPRESSION_NONE;
-  
+
   private String comparator = "memcmp";
-  
+
   private int chunkSize = 1024 * 1024;
-  
+
   private int inputBufferSize = 256 * 1024;
-  
+
   private int outputBufferSize = 256 * 1024;
 
-  
+
   private void setupConfig(Configuration conf)
   {
     conf.set("tfile.io.chunk.size", String.valueOf(chunkSize));
@@ -69,7 +69,7 @@ public abstract class TFileImpl extends FileAccessFSImpl
     setupConfig(fs.getConf());
     return new TFileWriter(fsdos, minBlockSize, compressName, comparator, fs.getConf());
   }
-  
+
   public int getMinBlockSize()
   {
     return minBlockSize;
@@ -140,13 +140,13 @@ public abstract class TFileImpl extends FileAccessFSImpl
   {
     this.outputBufferSize = outputBufferSize;
   }
-  
+
   /**
    * Return {@link TFile} {@link Reader}
    */
   public static class DefaultTFileImpl extends TFileImpl
   {
-    
+
     @Override
     public FileReader getReader(long bucketKey, String fileName) throws IOException
     {
@@ -155,15 +155,15 @@ public abstract class TFileImpl extends FileAccessFSImpl
       super.setupConfig(fs.getConf());
       return new TFileReader(fsdis, fileLength, fs.getConf());
     }
-    
+
   }
-  
+
   /**
    * Return {@link DTFile} {@link org.apache.hadoop.io.file.tfile.DTFile.Reader}
    */
   public static class DTFileImpl extends TFileImpl
   {
-    
+
     @Override
     public FileReader getReader(long bucketKey, String fileName) throws IOException
     {
@@ -172,7 +172,7 @@ public abstract class TFileImpl extends FileAccessFSImpl
       super.setupConfig(fs.getConf());
       return new DTFileReader(fsdis, fileLength, fs.getConf());
     }
-    
+
   }
 
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
index 7e9d544..da724d4 100644
--- a/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
+++ b/library/src/main/java/com/datatorrent/lib/fileaccess/TFileWriter.java
@@ -34,15 +34,15 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer;
 public final class TFileWriter implements FileAccess.FileWriter
 {
   private Writer writer;
-  
+
   private FSDataOutputStream fsdos;
-  
+
   public TFileWriter(FSDataOutputStream stream, int minBlockSize, String compressName,
       String comparator, Configuration conf) throws IOException
   {
     this.fsdos = stream;
     writer = new Writer(stream, minBlockSize, compressName, comparator, conf);
-    
+
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/filter/FilterOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/filter/FilterOperator.java b/library/src/main/java/com/datatorrent/lib/filter/FilterOperator.java
index 2a54e0f..6fccf1e 100644
--- a/library/src/main/java/com/datatorrent/lib/filter/FilterOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/filter/FilterOperator.java
@@ -56,7 +56,7 @@ import com.datatorrent.lib.util.PojoUtils;
  * - truePort emits POJOs meeting the given condition
  * - falsePort emits POJOs not meeting the given condition
  * - error port emits any error situation while evaluating expression
- * 
+ *
  *
  * @since 3.5.0
  */
@@ -234,6 +234,6 @@ public class FilterOperator extends BaseOperator implements Operator.ActivationL
   {
     return expressionFunctions;
   }
-  
+
   private static final Logger logger = LoggerFactory.getLogger(FilterOperator.class);
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java b/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
index db8dbc4..ef3c304 100644
--- a/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
+++ b/library/src/main/java/com/datatorrent/lib/formatter/Formatter.java
@@ -40,7 +40,7 @@ import com.datatorrent.lib.converter.Converter;
  * <b>err</b>: emits &lt;Object&gt; error port that emits input tuple that could
  * not be converted<br>
  * <br>
- * 
+ *
  * @displayName Parser
  * @tags parser converter
  * @param <OUTPUT>
@@ -99,7 +99,7 @@ public abstract class Formatter<OUTPUT> extends BaseOperator implements Converte
 
   /**
    * Get the class that needs to be formatted
-   * 
+   *
    * @return Class<?>
    */
   public Class<?> getClazz()
@@ -109,7 +109,7 @@ public abstract class Formatter<OUTPUT> extends BaseOperator implements Converte
 
   /**
    * Set the class of tuple that needs to be formatted
-   * 
+   *
    * @param clazz
    */
   public void setClazz(Class<?> clazz)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/formatter/JsonFormatter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/formatter/JsonFormatter.java b/library/src/main/java/com/datatorrent/lib/formatter/JsonFormatter.java
index 840b550..a784f89 100644
--- a/library/src/main/java/com/datatorrent/lib/formatter/JsonFormatter.java
+++ b/library/src/main/java/com/datatorrent/lib/formatter/JsonFormatter.java
@@ -29,7 +29,7 @@ import com.datatorrent.api.Context.OperatorContext;
 
 /**
  * Operator that converts POJO to JSON string <br>
- * 
+ *
  * @displayName JsonFormatter
  * @category Formatter
  * @tags pojo json formatter

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/formatter/XmlFormatter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/formatter/XmlFormatter.java b/library/src/main/java/com/datatorrent/lib/formatter/XmlFormatter.java
index 21a7b6a..78dc344 100644
--- a/library/src/main/java/com/datatorrent/lib/formatter/XmlFormatter.java
+++ b/library/src/main/java/com/datatorrent/lib/formatter/XmlFormatter.java
@@ -88,7 +88,7 @@ public class XmlFormatter extends Formatter<String>
    * Gets the alias This is an optional step. Without it XStream would work
    * fine, but the XML element names would contain the fully qualified name of
    * each class (including package) which would bulk up the XML a bit.
-   * 
+   *
    * @return alias.
    */
   public String getAlias()
@@ -100,7 +100,7 @@ public class XmlFormatter extends Formatter<String>
    * Sets the alias This is an optional step. Without it XStream would work
    * fine, but the XML element names would contain the fully qualified name of
    * each class (including package) which would bulk up the XML a bit.
-   * 
+   *
    * @param alias
    *          .
    */
@@ -112,7 +112,7 @@ public class XmlFormatter extends Formatter<String>
   /**
    * Gets the date format e.g dd/mm/yyyy - this will be how a date would be
    * formatted
-   * 
+   *
    * @return dateFormat.
    */
   public String getDateFormat()
@@ -123,7 +123,7 @@ public class XmlFormatter extends Formatter<String>
   /**
    * Sets the date format e.g dd/mm/yyyy - this will be how a date would be
    * formatted
-   * 
+   *
    * @param dateFormat
    *          .
    */
@@ -134,7 +134,7 @@ public class XmlFormatter extends Formatter<String>
 
   /**
    * Returns true if pretty print is enabled.
-   * 
+   *
    * @return prettyPrint
    */
   public boolean isPrettyPrint()
@@ -144,7 +144,7 @@ public class XmlFormatter extends Formatter<String>
 
   /**
    * Sets pretty print option.
-   * 
+   *
    * @param prettyPrint
    */
   public void setPrettyPrint(boolean prettyPrint)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/AbstractFTPInputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/AbstractFTPInputOperator.java b/library/src/main/java/com/datatorrent/lib/io/AbstractFTPInputOperator.java
index 1401100..16d220c 100644
--- a/library/src/main/java/com/datatorrent/lib/io/AbstractFTPInputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/AbstractFTPInputOperator.java
@@ -174,7 +174,7 @@ public abstract class AbstractFTPInputOperator<T> extends AbstractFileInputOpera
 
   /**
    * An {@link AbstractFTPInputOperator} that splits file into lines and emits them.
-   * 
+   *
    * @displayName FTP String Input
    * @category Input
    * @tags ftp

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/block/BlockWriter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/block/BlockWriter.java b/library/src/main/java/com/datatorrent/lib/io/block/BlockWriter.java
index 9e18e1b..64c066b 100644
--- a/library/src/main/java/com/datatorrent/lib/io/block/BlockWriter.java
+++ b/library/src/main/java/com/datatorrent/lib/io/block/BlockWriter.java
@@ -59,7 +59,7 @@ public class BlockWriter extends AbstractFileOutputOperator<AbstractBlockReader.
    * Directory under application directory where blocks gets stored
    */
   private String blocksDirectory = DEFAULT_BLOCKS_DIR;
-  
+
   /**
    * List of FileBlockMetadata received in the current window.
    */
@@ -206,7 +206,7 @@ public class BlockWriter extends AbstractFileOutputOperator<AbstractBlockReader.
       }
     }
   }
-  
+
   /**
    * Directory under application directory where blocks gets stored
    * @return blocks directory
@@ -215,7 +215,7 @@ public class BlockWriter extends AbstractFileOutputOperator<AbstractBlockReader.
   {
     return blocksDirectory;
   }
-  
+
   /**
    * Directory under application directory where blocks gets stored
    * @param blocksDirectory blocks directory
@@ -230,7 +230,7 @@ public class BlockWriter extends AbstractFileOutputOperator<AbstractBlockReader.
   {
 
   }
-  
+
   private static final Logger LOG = LoggerFactory.getLogger(BlockWriter.class);
 
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/block/FSSliceReader.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/block/FSSliceReader.java b/library/src/main/java/com/datatorrent/lib/io/block/FSSliceReader.java
index ad55358..60fd93c 100644
--- a/library/src/main/java/com/datatorrent/lib/io/block/FSSliceReader.java
+++ b/library/src/main/java/com/datatorrent/lib/io/block/FSSliceReader.java
@@ -26,7 +26,7 @@ import com.datatorrent.netlet.util.Slice;
  *
  * @category Input
  * @tags fs
- * 
+ *
  * @since 2.1.0
  */
 @StatsListener.DataQueueSize

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileSplitter.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileSplitter.java b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileSplitter.java
index 7e6bd2f..38c8e96 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileSplitter.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileSplitter.java
@@ -389,7 +389,7 @@ public abstract class AbstractFileSplitter extends BaseOperator
       this.filePath = filePath;
       discoverTime = System.currentTimeMillis();
     }
-    
+
     protected FileMetadata(FileMetadata fileMetadata)
     {
       this();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperator.java
index 3b60d4a..99a8fb6 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/AbstractSingleFileOutputOperator.java
@@ -44,8 +44,8 @@ public abstract class AbstractSingleFileOutputOperator<INPUT> extends AbstractFi
   protected String outputFileName;
 
   /**
-   * partitionedFileName string format specifier 
-      e.g. fileName_physicalPartionId -> %s_%d 
+   * partitionedFileName string format specifier
+      e.g. fileName_physicalPartionId -> %s_%d
    */
   private String partitionedFileNameformat = "%s_%d";
 
@@ -105,17 +105,17 @@ public abstract class AbstractSingleFileOutputOperator<INPUT> extends AbstractFi
   {
     return partitionedFileNameformat;
   }
-  
+
   /**
    * @param partitionedFileNameformat
    *          string format specifier for the partitioned file name. It should have one %s and one %d.
-   *          e.g. fileName_physicalPartionId -> %s_%d 
+   *          e.g. fileName_physicalPartionId -> %s_%d
    */
   public void setPartitionedFileNameformat(String partitionedFileNameformat)
   {
     this.partitionedFileNameformat = partitionedFileNameformat;
   }
-  
+
   /**
    * @return
    * Derived name for file based on physicalPartitionId
@@ -124,5 +124,5 @@ public abstract class AbstractSingleFileOutputOperator<INPUT> extends AbstractFi
   {
     return partitionedFileName;
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/FileMerger.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FileMerger.java b/library/src/main/java/com/datatorrent/lib/io/fs/FileMerger.java
index 04aa8cf..db7d7c5 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FileMerger.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FileMerger.java
@@ -118,7 +118,7 @@ public class FileMerger extends FileStitcher<OutputFileMetadata>
     OutputStream outputStream = outputFS.create(partFilePath);
     return outputStream;
   }
-  
+
   /**
    * Flag to control if existing file with same name should be overwritten
    * @return Flag to control if existing file with same name should be overwritten

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/FileStitcher.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FileStitcher.java b/library/src/main/java/com/datatorrent/lib/io/fs/FileStitcher.java
index 5f5c717..c4ad9d3 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FileStitcher.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FileStitcher.java
@@ -48,7 +48,7 @@ import com.datatorrent.lib.io.fs.Synchronizer.StitchedFileMetaData;
  * This is generic File Stitcher which can be used to merge data from one or
  * more files into single stitched file. StitchedFileMetaData defines
  * constituents of the stitched file.
- * 
+ *
  * This class uses Reconciler to
  *
  * @since 3.4.0
@@ -75,7 +75,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
    * Path for blocks directory
    */
   protected transient String blocksDirectoryPath;
-  
+
   /**
    * Directory under application directory where blocks gets stored
    */
@@ -133,8 +133,8 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
     super.setup(context); // Calling it at the end as the reconciler thread uses resources allocated above.
   }
 
-  /* 
-   * Calls super.endWindow() and sets counters 
+  /*
+   * Calls super.endWindow() and sets counters
    * @see com.datatorrent.api.BaseOperator#endWindow()
    */
   @Override
@@ -146,7 +146,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
       stitchedFileMetaData = doneTuples.peek();
       // If a tuple is present in doneTuples, it has to be also present in successful/failed/skipped
       // as processCommittedData adds tuple in successful/failed/skipped
-      // and then reconciler thread add that in doneTuples 
+      // and then reconciler thread add that in doneTuples
       if (successfulFiles.contains(stitchedFileMetaData)) {
         successfulFiles.remove(stitchedFileMetaData);
         LOG.debug("File copy successful: {}", stitchedFileMetaData.getStitchedFileRelativePath());
@@ -167,7 +167,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
   }
 
   /**
-   * 
+   *
    * @return Application FileSystem instance
    * @throws IOException
    */
@@ -177,7 +177,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
   }
 
   /**
-   * 
+   *
    * @return Destination FileSystem instance
    * @throws IOException
    */
@@ -240,7 +240,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
   /**
    * Read data from block files and write to output file. Information about
    * which block files should be read is specified in outFileMetadata
-   * 
+   *
    * @param stitchedFileMetaData
    * @throws IOException
    */
@@ -287,7 +287,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
 
   /**
    * Writing all Stitch blocks to temporary file
-   * 
+   *
    * @param stitchedFileMetaData
    * @throws IOException
    * @throws BlockNotFoundException
@@ -312,7 +312,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
 
   /**
    * Moving temp output file to final file
-   * 
+   *
    * @param stitchedFileMetaData
    * @throws IOException
    */
@@ -324,7 +324,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
 
   /**
    * Moving temp output file to final file
-   * 
+   *
    * @param tempOutFilePath
    *          Temporary output file
    * @param destination
@@ -351,7 +351,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
       throw new RuntimeException("Unable to move file from " + src + " to " + dst);
     }
   }
-  
+
   /**
    * Directory under application directory where blocks gets stored
    * @return blocks directory
@@ -360,7 +360,7 @@ public class FileStitcher<T extends StitchedFileMetaData> extends AbstractReconc
   {
     return blocksDirectory;
   }
-  
+
   /**
    * Directory under application directory where blocks gets stored
    * @param blocksDirectory blocks directory

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamCodec.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamCodec.java b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamCodec.java
index 1029dff..baf2297 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamCodec.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamCodec.java
@@ -41,7 +41,7 @@ public class FilterStreamCodec
     {
       filterStream = new GZIPOutputStream(outputStream);
     }
-    
+
     @Override
     public void finalizeContext() throws IOException
     {
@@ -80,7 +80,7 @@ public class FilterStreamCodec
   }
 
   /**
-   * This provider is useful when writing to a single output stream so that the same cipher can be reused 
+   * This provider is useful when writing to a single output stream so that the same cipher can be reused
    */
   public static class CipherSimpleStreamProvider implements FilterStreamProvider<CipherOutputStream, OutputStream>
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
index 35530a3..dd0393a 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamContext.java
@@ -61,7 +61,7 @@ public interface FilterStreamContext<F extends FilterOutputStream>
     }
 
   }
-  
+
   public static class SimpleFilterStreamContext<F extends FilterOutputStream> extends BaseFilterStreamContext<F>
   {
     public SimpleFilterStreamContext(F filterStream)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
index 75e6e5f..6debaec 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/FilterStreamProvider.java
@@ -39,9 +39,9 @@ import com.google.common.collect.Maps;
 public interface FilterStreamProvider<F extends FilterOutputStream, S extends OutputStream>
 {
   public FilterStreamContext<F> getFilterStreamContext(S outputStream) throws IOException;
-  
+
   public void reclaimFilterStreamContext(FilterStreamContext<F> filterStreamContext);
-  
+
   abstract class SimpleFilterReusableStreamProvider<F extends FilterOutputStream, S extends OutputStream> implements FilterStreamProvider<F, S>
   {
 
@@ -67,7 +67,7 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
         reusableContexts.put(outputStream, filterStreamContext);
       }
     }
-    
+
     protected abstract FilterStreamContext<F> createFilterStreamContext(OutputStream outputStream) throws IOException;
   }
 
@@ -78,7 +78,7 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
   public static class FilterChainStreamProvider<F extends FilterOutputStream, S extends OutputStream> implements FilterStreamProvider<F, S>
   {
     private List<FilterStreamProvider<?,?>> streamProviders = new ArrayList<FilterStreamProvider<?, ?>>();
-    
+
     public Collection<FilterStreamProvider<?,?>> getStreamProviders()
     {
       return Collections.unmodifiableList(streamProviders);
@@ -88,7 +88,7 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
     {
       streamProviders.add(streamProvider);
     }
-    
+
     @Override
     public FilterStreamContext<F> getFilterStreamContext(S outputStream) throws IOException
     {
@@ -120,7 +120,7 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
     private class FilterChainStreamContext extends FilterStreamContext.BaseFilterStreamContext
         implements FilterStreamContext
     {
-      
+
       private List<FilterStreamContext<?>> streamContexts = new ArrayList<FilterStreamContext<?>>();
 
       public void pushStreamContext(FilterStreamContext<?> streamContext)
@@ -128,7 +128,7 @@ public interface FilterStreamProvider<F extends FilterOutputStream, S extends Ou
         streamContexts.add(0, streamContext);
         filterStream = streamContext.getFilterStream();
       }
-      
+
       public Collection<FilterStreamContext<?>> getStreamContexts()
       {
         return Collections.unmodifiableCollection(streamContexts);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileCopyModule.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileCopyModule.java b/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileCopyModule.java
index f4d1a38..5fbc580 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileCopyModule.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileCopyModule.java
@@ -37,9 +37,9 @@ import com.datatorrent.netlet.util.Slice;
  * copy files from any file system to HDFS. This module supports parallel write
  * to multiple blocks of the same file and then stitching those blocks in
  * original sequence.
- * 
+ *
  * Essential operators are wrapped into single component using Module API.
- * 
+ *
  *
  * @since 3.4.0
  */
@@ -108,7 +108,7 @@ public class HDFSFileCopyModule implements Module
   /**
    * Path of the output directory. Relative path of the files copied will be
    * maintained w.r.t. source directory and output directory
-   * 
+   *
    * @return output directory path
    */
   public String getOutputDirectoryPath()
@@ -119,7 +119,7 @@ public class HDFSFileCopyModule implements Module
   /**
    * Path of the output directory. Relative path of the files copied will be
    * maintained w.r.t. source directory and output directory
-   * 
+   *
    * @param outputDirectoryPath
    *          output directory path
    */
@@ -130,7 +130,7 @@ public class HDFSFileCopyModule implements Module
 
   /**
    * Flag to control if existing file with same name should be overwritten
-   * 
+   *
    * @return Flag to control if existing file with same name should be
    *         overwritten
    */
@@ -141,7 +141,7 @@ public class HDFSFileCopyModule implements Module
 
   /**
    * Flag to control if existing file with same name should be overwritten
-   * 
+   *
    * @param overwriteOnConflict
    *          Flag to control if existing file with same name should be
    *          overwritten

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileMerger.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileMerger.java b/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileMerger.java
index 6f72484..cd2bee3 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileMerger.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/HDFSFileMerger.java
@@ -102,7 +102,7 @@ public class HDFSFileMerger extends FileMerger
 
   /**
    * Fast merge using HDFS block concat
-   * 
+   *
    * @param outputFileMetadata
    * @throws IOException
    */
@@ -130,7 +130,7 @@ public class HDFSFileMerger extends FileMerger
   /**
    * Attempt for recovery if block concat is successful but temp file is not
    * moved to final file
-   * 
+   *
    * @param outputFileMetadata
    * @throws IOException
    */
@@ -179,7 +179,7 @@ public class HDFSFileMerger extends FileMerger
     /**
      * Checks if fast merge is possible for given settings for blocks directory,
      * application file system, block size
-     * 
+     *
      * @param outputFileMetadata
      * @throws IOException
      * @throws BlockNotFoundException

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/fs/Synchronizer.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/fs/Synchronizer.java b/library/src/main/java/com/datatorrent/lib/io/fs/Synchronizer.java
index 8632343..a325a2f 100644
--- a/library/src/main/java/com/datatorrent/lib/io/fs/Synchronizer.java
+++ b/library/src/main/java/com/datatorrent/lib/io/fs/Synchronizer.java
@@ -115,7 +115,7 @@ public class Synchronizer extends BaseOperator
   /**
    * Checks if all blocks for given file are received. Sends triggger when all
    * blocks are received.
-   * 
+   *
    * @param fileMetadata
    * @param receivedBlocksMetadata
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java b/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
index efda6b0..f26ecf4 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/AbstractJMSSinglePortOutputOperator.java
@@ -55,7 +55,7 @@ public abstract class AbstractJMSSinglePortOutputOperator<T> extends AbstractJMS
 {
   @SuppressWarnings("unused")
   private static final Logger logger = LoggerFactory.getLogger(AbstractJMSSinglePortOutputOperator.class);
-  
+
   /**
    * Convert to and send message.
    * @param tuple

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java b/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
index 772464a..99c2eeb 100644
--- a/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
+++ b/library/src/main/java/com/datatorrent/lib/io/jms/JMSBase.java
@@ -139,7 +139,7 @@ public class JMSBase
     {
       this.connectionFactoryProperties = connectionFactoryProperties;
     }
-    
+
     /**
      * Get the fully qualified class-name of the connection factory that is used by this
      * builder to instantiate the connection factory
@@ -150,7 +150,7 @@ public class JMSBase
     {
       return connectionFactoryClass;
     }
-    
+
     /**
      * Set the fully qualified class-name of the connection factory that is used by this
      * builder to instantiate the connection factory
@@ -213,7 +213,7 @@ public class JMSBase
   {
     return destination;
   }
-  
+
   /**
    * gets the connection factory class-name used by the default connection factory builder
    *
@@ -244,7 +244,7 @@ public class JMSBase
     }
     return (DefaultConnectionFactoryBuilder)connectionFactoryBuilder;
   }
-  
+
   /**
    * Sets the connection factory class-name used by the default connection factory builder
    *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java b/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
index 5f09a4b..e403979 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AbstractAggregateCalc.java
@@ -42,7 +42,7 @@ import com.datatorrent.api.DefaultInputPort;
  * <b>integerResult</b>: emits Integer<br>
  * <b>longResult</b>: emits Long<br>
  * <br>
- * 
+ *
  * @displayName Abstract Aggregate Calculator
  * @category Math
  * @tags aggregate, collection

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java b/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
index 91cc9ba..f7283e0 100644
--- a/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
+++ b/library/src/main/java/com/datatorrent/lib/math/AbstractXmlKeyValueCartesianProduct.java
@@ -22,9 +22,9 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
 /**
- * This operator extends the AbstractXmlCartesianProduct operator and implements the node value 
+ * This operator extends the AbstractXmlCartesianProduct operator and implements the node value
  * as a key value pair of node name and the node's text value.
- * 
+ *
  * @displayName Abstract XML Key Value Cartesian Product
  * @category Math
  * @tags cartesian product, xml, multiple products, key value

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/Division.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Division.java b/library/src/main/java/com/datatorrent/lib/math/Division.java
index d05af18..f5a01aa 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Division.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Division.java
@@ -58,17 +58,17 @@ public class Division extends BaseOperator
    * Array to store numerator inputs during window.
    */
   private ArrayList<Number> numer = new ArrayList<Number>();
-  
+
   /**
    * Array to store denominator input during window.
    */
   private ArrayList<Number> denom = new ArrayList<Number>();
-  
+
   /**
    * Number of pair processed in current window.
    */
   private int index = 0;
-  
+
   /**
    * Numerator input port.
    */
@@ -112,55 +112,55 @@ public class Division extends BaseOperator
       }
     }
   };
- 
+
   /**
-   * Long quotient output port. 
+   * Long quotient output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Long> longQuotient = new DefaultOutputPort<Long>();
-  
+
   /**
-   * Integer quotient output port. 
+   * Integer quotient output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Integer> integerQuotient = new DefaultOutputPort<Integer>();
-  
+
   /**
-   * Double quotient output port. 
+   * Double quotient output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Double> doubleQuotient = new DefaultOutputPort<Double>();
 
   /**
-   * Float quotient output port. 
+   * Float quotient output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Float> floatQuotient = new DefaultOutputPort<Float>();
-  
+
   /**
-   * Long remainder output port. 
+   * Long remainder output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Long> longRemainder = new DefaultOutputPort<Long>();
-  
+
   /**
-   * Integer remainder output port. 
+   * Integer remainder output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Integer> integerRemainder = new DefaultOutputPort<Integer>();
- 
+
   /**
-   * Double remainder output port. 
+   * Double remainder output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Double> doubleRemainder = new DefaultOutputPort<Double>();
-  
+
   /**
-   * Float remainder output port. 
+   * Float remainder output port.
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<Float> floatRemainder = new DefaultOutputPort<Float>();
-  
+
   /**
    * Error data output port that emits a string.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/Margin.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Margin.java b/library/src/main/java/com/datatorrent/lib/math/Margin.java
index 94e15d6..5d1872e 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Margin.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Margin.java
@@ -24,7 +24,7 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.lib.util.BaseNumberValueOperator;
 
 /**
- * This operator sums the division of numerator and denominator value arriving at input ports. 
+ * This operator sums the division of numerator and denominator value arriving at input ports.
  * <p>
  * <br>
  * Margin Formula used by this operator: 1 - numerator/denominator.<br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MarginMap.java b/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
index 7ef1f81..d1fa33f 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MarginMap.java
@@ -54,7 +54,7 @@ public class MarginMap<K, V extends Number> extends BaseNumberKeyValueOperator<K
 {
   /**
    * Numerator input port that takes a map.
-   */  
+   */
   public final transient DefaultInputPort<Map<K, V>> numerator = new DefaultInputPort<Map<K, V>>()
   {
     /**
@@ -66,7 +66,7 @@ public class MarginMap<K, V extends Number> extends BaseNumberKeyValueOperator<K
       addTuple(tuple, numerators);
     }
   };
-  
+
   /**
    * Denominator input port that takes a map.
    */
@@ -101,7 +101,7 @@ public class MarginMap<K, V extends Number> extends BaseNumberKeyValueOperator<K
       val.add(e.getValue().doubleValue());
     }
   }
-  
+
   /*
    * Output margin port that emits hashmap.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/Min.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Min.java b/library/src/main/java/com/datatorrent/lib/math/Min.java
index 4b3fa23..a862f2e 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Min.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Min.java
@@ -45,10 +45,10 @@ public class Min<V extends Number> extends BaseNumberValueOperator<V> implements
    * Computed low value.
    */
   protected V low;
-  
+
   // transient field
   protected boolean flag = false;
-  
+
   /**
    * Input port that takes a number and compares to min and stores the new min.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java b/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
index dd56f7f..5396768 100644
--- a/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
+++ b/library/src/main/java/com/datatorrent/lib/math/MultiplyByConstant.java
@@ -26,7 +26,7 @@ import com.datatorrent.common.util.BaseOperator;
 /**
  * Multiplies input tuple (Number) by the value of property "multiplier" and emits the result on respective ports.
  * <p>
- * This operator emits the result as Long on port "longProduct", as Integer on port "integerProduct", as Double on port "doubleProduct", and as Float on port "floatProduct". 
+ * This operator emits the result as Long on port "longProduct", as Integer on port "integerProduct", as Double on port "doubleProduct", and as Float on port "floatProduct".
  * Output is computed in current window.No state dependency among input tuples
  * This is a pass through operator
  * <br>
@@ -79,22 +79,22 @@ public class MultiplyByConstant extends BaseOperator
     }
 
   };
-  
+
   /**
    * Long output port.
    */
   public final transient DefaultOutputPort<Long> longProduct = new DefaultOutputPort<Long>();
-  
+
   /**
    * Integer output port.
    */
   public final transient DefaultOutputPort<Integer> integerProduct = new DefaultOutputPort<Integer>();
-  
+
   /**
    * Double output port.
    */
   public final transient DefaultOutputPort<Double> doubleProduct = new DefaultOutputPort<Double>();
-  
+
   /**
    * Float output port.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java b/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
index 286d72e..163f06b 100644
--- a/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
+++ b/library/src/main/java/com/datatorrent/lib/math/RunningAverage.java
@@ -24,7 +24,7 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.common.util.BaseOperator;
 
 /**
- * Calculate the running average of the input numbers and emit it at the end of the window. 
+ * Calculate the running average of the input numbers and emit it at the end of the window.
  * <p>
  * This is an end of window operator.<br>
  * <br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/Sigma.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Sigma.java b/library/src/main/java/com/datatorrent/lib/math/Sigma.java
index 6bfb9cf..1a9df60 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Sigma.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Sigma.java
@@ -26,7 +26,7 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
  * Adds incoming tuple to the state and emits the result of each addition on the respective ports.
  * <p>
  * The addition would go on forever.Result is emitted on four different data type ports:floatResult,integerResult,longResult,doubleResult.
- * Input tuple object has to be an implementation of the interface Collection.Tuples are emitted on the output ports only if they are connected. 
+ * Input tuple object has to be an implementation of the interface Collection.Tuples are emitted on the output ports only if they are connected.
  * This is done to avoid the cost of calling the functions when some ports are not connected.
  * This is a stateful pass through operator<br>
  * <b>Partitions : </b>, no will yield wrong results, no unifier on output port.

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/SingleVariableAbstractCalculus.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/SingleVariableAbstractCalculus.java b/library/src/main/java/com/datatorrent/lib/math/SingleVariableAbstractCalculus.java
index d97b905..9569074 100644
--- a/library/src/main/java/com/datatorrent/lib/math/SingleVariableAbstractCalculus.java
+++ b/library/src/main/java/com/datatorrent/lib/math/SingleVariableAbstractCalculus.java
@@ -23,7 +23,7 @@ import com.datatorrent.api.DefaultInputPort;
 /**
  * Transforms the input into the output after applying appropriate mathematical function to it and emits result on respective ports.
  * <p>
- * Emits the result as Long on port "longResult", as Integer on port "integerResult",as Double on port "doubleResult", and as Float on port "floatResult". 
+ * Emits the result as Long on port "longResult", as Integer on port "integerResult",as Double on port "doubleResult", and as Float on port "floatResult".
  * This is a pass through operator<br>
  * <br>
  * <b>Ports</b>:<br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/Sum.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/Sum.java b/library/src/main/java/com/datatorrent/lib/math/Sum.java
index 0f5e64f..0214268 100644
--- a/library/src/main/java/com/datatorrent/lib/math/Sum.java
+++ b/library/src/main/java/com/datatorrent/lib/math/Sum.java
@@ -29,7 +29,7 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
 import com.datatorrent.lib.util.UnifierSumNumber;
 
 /**
- * This operator implements Unifier interface and emits the sum of values at the end of window. 
+ * This operator implements Unifier interface and emits the sum of values at the end of window.
  * <p>
  * This is an end of window operator. Application can turn this into accumulated
  * sum operator by setting cumulative flag to true. <br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java b/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
index 7f36ef5..cc50fe1 100644
--- a/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
+++ b/library/src/main/java/com/datatorrent/lib/math/XmlKeyValueStringCartesianProduct.java
@@ -28,7 +28,7 @@ import com.datatorrent.api.DefaultOutputPort;
 /**
  * An implementation of the AbstractXmlKeyValueCartesianProduct operator that takes in the xml document
  * as a String input and outputs the cartesian product as Strings.
- * 
+ *
  * @displayName Xml Key Value String Cartesian Product
  * @category Math
  * @tags cartesian product, string, xml
@@ -38,7 +38,7 @@ public class XmlKeyValueStringCartesianProduct extends AbstractXmlKeyValueCartes
 {
 
   InputSource source = new InputSource();
-  
+
   /**
    * Output port that emits cartesian product as Strings.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/math/package-info.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/math/package-info.java b/library/src/main/java/com/datatorrent/lib/math/package-info.java
index f583662..c22309b 100644
--- a/library/src/main/java/com/datatorrent/lib/math/package-info.java
+++ b/library/src/main/java/com/datatorrent/lib/math/package-info.java
@@ -22,10 +22,10 @@
  * Most of the arithmetic operators come in three types based on their schema.
  * The operators whose names ends with "Map" (eg SumMap, MaxMap, MinMap) take in Map on input ports and emit HashMap. These operators use
  * round robin partitioning and would merge as per their functionality.
- * <br>  
+ * <br>
  * The operators whose names ends with "KeyVal" (eg SumKeyVal, MaxKeyVal, MinKeyVal) take in KeyValPair and emit KeyValPair. These operators use
  * sticky key partitioning and would merge using default pass through merge operator.
- * <br>  
+ * <br>
  * The operators whose names are just their function name (eg Sum, Min, Max) operate on same objects and emit a final result. These operators have no keys.
  * They partition in roundrobin and would merge as per their functionality.
  * <br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/main/java/com/datatorrent/lib/parser/Parser.java
----------------------------------------------------------------------
diff --git a/library/src/main/java/com/datatorrent/lib/parser/Parser.java b/library/src/main/java/com/datatorrent/lib/parser/Parser.java
index 4f591f1..0403dc9 100644
--- a/library/src/main/java/com/datatorrent/lib/parser/Parser.java
+++ b/library/src/main/java/com/datatorrent/lib/parser/Parser.java
@@ -40,7 +40,7 @@ import com.datatorrent.lib.converter.Converter;
  * <b>err</b>: emits &lt;INPUT&gt; error port that emits input tuple that could
  * not be converted<br>
  * <br>
- * 
+ *
  * @displayName Parser
  * @tags parser converter
  * @param <INPUT>
@@ -108,7 +108,7 @@ public abstract class Parser<INPUT, ERROROUT> extends BaseOperator implements Co
 
   /**
    * Get the class that needs to be formatted
-   * 
+   *
    * @return Class<?>
    */
   public Class<?> getClazz()
@@ -118,7 +118,7 @@ public abstract class Parser<INPUT, ERROROUT> extends BaseOperator implements Co
 
   /**
    * Set the class of tuple that needs to be formatted
-   * 
+   *
    * @param clazz
    */
   public void setClazz(Class<?> clazz)


[6/6] apex-malhar git commit: Fix trailing whitespace.

Posted by vr...@apache.org.
Fix trailing whitespace.


Project: http://git-wip-us.apache.org/repos/asf/apex-malhar/repo
Commit: http://git-wip-us.apache.org/repos/asf/apex-malhar/commit/763d14fc
Tree: http://git-wip-us.apache.org/repos/asf/apex-malhar/tree/763d14fc
Diff: http://git-wip-us.apache.org/repos/asf/apex-malhar/diff/763d14fc

Branch: refs/heads/master
Commit: 763d14fca6b84fdda1b6853235e5d4b71ca87fca
Parents: 90b5c9b
Author: CI Support <je...@datatorrent.com>
Authored: Mon Sep 26 20:36:22 2016 -0700
Committer: CI Support <je...@datatorrent.com>
Committed: Mon Sep 26 20:36:22 2016 -0700

----------------------------------------------------------------------
 .../LogstreamWidgetOutputOperator.java          |  2 +-
 .../benchmark/CouchBaseAppOutput.java           |  2 +-
 ...nchmarkPartitionableKafkaOutputOperator.java |  2 +-
 .../state/ManagedStateBenchmarkApp.java         |  2 +-
 .../benchmark/state/StoreOperator.java          | 28 ++++----
 .../state/ManagedStateBenchmarkAppTester.java   | 14 ++--
 .../contrib/avro/AvroFileInputOperator.java     |  6 +-
 .../datatorrent/contrib/avro/AvroToPojo.java    |  6 +-
 .../datatorrent/contrib/avro/PojoToAvro.java    | 10 +--
 .../AbstractElasticSearchOutputOperator.java    |  2 +-
 .../elasticsearch/ElasticSearchConnectable.java |  6 +-
 .../ElasticSearchMapInputOperator.java          |  6 +-
 .../ElasticSearchMapOutputOperator.java         | 10 +--
 .../ElasticSearchPercolatorStore.java           | 12 ++--
 .../contrib/enrich/DelimitedFSLoader.java       |  4 +-
 .../datatorrent/contrib/enrich/FSLoader.java    |  2 +-
 .../contrib/enrich/FixedWidthFSLoader.java      | 10 +--
 .../contrib/formatter/CsvFormatter.java         |  6 +-
 .../geode/AbstractGeodeInputOperator.java       |  4 +-
 .../geode/AbstractGeodeOutputOperator.java      |  4 +-
 .../contrib/geode/GeodeCheckpointStore.java     | 18 +++---
 .../geode/GeodeKeyValueStorageAgent.java        |  2 +-
 .../contrib/geode/GeodePOJOOutputOperator.java  |  2 +-
 .../datatorrent/contrib/geode/GeodeStore.java   |  6 +-
 .../contrib/geode/RegionCreateFunction.java     |  2 +-
 .../contrib/hbase/HBaseFieldInfo.java           | 34 +++++-----
 .../kafka/AbstractKafkaInputOperator.java       |  6 +-
 .../contrib/kafka/HighlevelKafkaConsumer.java   |  8 +--
 .../contrib/kafka/KafkaPartition.java           | 16 ++---
 .../contrib/kafka/OffsetManager.java            |  4 +-
 .../contrib/kafka/SimpleKafkaConsumer.java      | 10 +--
 .../kinesis/AbstractKinesisOutputOperator.java  | 12 ++--
 .../contrib/memcache/MemcacheStore.java         |  2 +-
 .../contrib/mqtt/MqttClientConfig.java          |  2 +-
 .../parquet/AbstractParquetFileReader.java      |  6 +-
 .../contrib/parquet/ParquetFilePOJOReader.java  | 10 +--
 .../contrib/parser/CellProcessorBuilder.java    | 34 +++++-----
 .../datatorrent/contrib/parser/CsvParser.java   | 12 ++--
 .../contrib/parser/DelimitedSchema.java         | 26 ++++----
 .../datatorrent/contrib/parser/JsonParser.java  | 16 ++---
 .../contrib/r/REngineConnectable.java           |  8 +--
 .../java/com/datatorrent/contrib/r/RScript.java |  8 +--
 .../rabbitmq/AbstractRabbitMQInputOperator.java | 30 ++++-----
 .../AbstractRabbitMQOutputOperator.java         | 14 ++--
 .../rabbitmq/RabbitMQOutputOperator.java        |  4 +-
 .../redis/AbstractRedisInputOperator.java       |  4 +-
 .../redis/RedisKeyValueInputOperator.java       |  2 +-
 .../redis/RedisMapAsValueInputOperator.java     |  4 +-
 .../datatorrent/contrib/redis/RedisStore.java   |  6 +-
 .../solr/AbstractSolrOutputOperator.java        |  2 +-
 .../ConcurrentUpdateSolrServerConnector.java    |  2 +-
 .../datatorrent/contrib/splunk/SplunkStore.java |  2 +-
 .../contrib/zmq/ZeroMQInputOperator.java        |  2 +-
 .../apex/malhar/contrib/misc/math/Change.java   |  4 +-
 .../contrib/misc/math/CompareExceptMap.java     |  4 +-
 .../malhar/contrib/misc/math/ExceptMap.java     |  2 +-
 .../apex/malhar/contrib/misc/math/Quotient.java |  2 +-
 .../malhar/contrib/misc/math/QuotientMap.java   |  2 +-
 .../malhar/contrib/misc/math/SumCountMap.java   |  6 +-
 .../contrib/parser/StreamingJsonParser.java     |  8 +--
 .../contrib/couchbase/CouchBaseSetTest.java     |  2 +-
 .../ElasticSearchOperatorTest.java              |  6 +-
 .../ElasticSearchPercolateTest.java             | 10 +--
 .../contrib/geode/GeodeCheckpointStoreTest.java |  2 +-
 .../hbase/HBasePOJOInputOperatorTest.java       | 34 +++++-----
 .../contrib/hbase/HBasePOJOPutOperatorTest.java | 36 +++++------
 .../HBaseTransactionalPutOperatorTest.java      | 12 ++--
 .../datatorrent/contrib/hbase/HBaseUtil.java    |  8 +--
 .../contrib/helper/MessageQueueTestHelper.java  |  2 +-
 .../KafkaExactlyOnceOutputOperatorTest.java     | 20 +++---
 .../contrib/kafka/KafkaTestPartitioner.java     |  2 +-
 .../contrib/kafka/KafkaTestProducer.java        |  4 +-
 .../kinesis/KinesisOperatorTestBase.java        | 12 ++--
 .../kinesis/KinesisOutputOperatorTest.java      | 16 ++---
 .../KinesisStringOutputOperatorTest.java        |  4 +-
 .../contrib/kinesis/KinesisTestConsumer.java    | 26 ++++----
 .../memcache/MemcachePOJOOperatorTest.java      | 20 +++---
 .../memsql/AbstractMemsqlInputOperatorTest.java |  2 +-
 .../RabbitMQOutputOperatorBenchmark.java        |  2 +-
 .../contrib/redis/RedisInputOperatorTest.java   |  2 +-
 .../contrib/splunk/SplunkInputOperatorTest.java |  2 +-
 .../splunk/SplunkTcpOutputOperatorTest.java     |  2 +-
 .../util/FieldValueSerializableGenerator.java   | 20 +++---
 .../contrib/util/POJOTupleGenerateOperator.java | 30 ++++-----
 .../com/datatorrent/contrib/util/TestPOJO.java  | 22 +++----
 .../contrib/util/TupleCacheOutputOperator.java  | 14 ++--
 .../util/TupleGenerateCacheOperator.java        |  8 +--
 .../contrib/util/TupleGenerator.java            | 20 +++---
 .../contrib/zmq/ZeroMQInputOperatorTest.java    |  2 +-
 .../contrib/zmq/ZeroMQMessageGenerator.java     |  6 +-
 .../contrib/zmq/ZeroMQMessageReceiver.java      |  8 +--
 .../contrib/zmq/ZeroMQOutputOperatorTest.java   |  4 +-
 .../streamquery/FullOuterJoinOperatorTest.java  |  6 +-
 .../misc/streamquery/GroupByOperatorTest.java   |  2 +-
 .../misc/streamquery/HavingOperatorTest.java    |  2 +-
 .../misc/streamquery/InnerJoinOperatorTest.java |  2 +-
 .../streamquery/LeftOuterJoinOperatorTest.java  |  6 +-
 .../streamquery/RightOuterJoinOperatorTest.java |  8 +--
 .../misc/streamquery/SelectTopOperatorTest.java |  6 +-
 .../advanced/BetweenConditionTest.java          |  2 +-
 .../advanced/CompoundConditionTest.java         |  2 +-
 .../streamquery/advanced/InConditionTest.java   |  2 +-
 .../demos/machinedata/data/AverageData.java     |  2 +-
 .../demos/machinedata/data/MachineInfo.java     | 20 +++---
 .../demos/mobile/PhoneEntryOperator.java        | 10 +--
 .../datatorrent/demos/pi/CalculatorTest.java    |  2 +-
 .../hive/AbstractFSRollingOutputOperator.java   |  2 +-
 .../datatorrent/contrib/hive/HiveOperator.java  | 14 ++--
 .../apex/malhar/hive/HiveOutputModule.java      | 52 +++++++--------
 .../malhar/kafka/AbstractKafkaPartitioner.java  | 12 ++--
 .../apex/malhar/kafka/KafkaConsumerWrapper.java |  4 +-
 .../malhar/kafka/KafkaInputOperatorTest.java    | 44 ++++++-------
 .../apex/malhar/kafka/KafkaTestPartitioner.java |  2 +-
 .../apex/malhar/kafka/KafkaTestProducer.java    |  8 +--
 .../lib/appdata/gpo/SerdeMapPrimitive.java      |  4 +-
 .../lib/appdata/query/WindowBoundedService.java |  2 +-
 .../schemas/DimensionalConfigurationSchema.java |  2 +-
 .../lib/appdata/schemas/DimensionalSchema.java  | 14 ++--
 .../datatorrent/lib/appdata/schemas/Schema.java |  2 +-
 .../lib/appdata/schemas/SnapshotSchema.java     |  2 +-
 .../snapshot/AbstractAppDataSnapshotServer.java | 18 +++---
 .../lib/bandwidth/BandwidthPartitioner.java     |  4 +-
 .../com/datatorrent/lib/codec/package-info.java |  2 +-
 .../datatorrent/lib/converter/Converter.java    |  4 +-
 .../com/datatorrent/lib/db/KeyValueStore.java   |  2 +-
 .../db/jdbc/AbstractJdbcPOJOOutputOperator.java |  2 +-
 .../db/jdbc/AbstractJdbcPollInputOperator.java  |  6 +-
 .../lib/db/jdbc/JdbcPollInputOperator.java      |  2 +-
 .../datatorrent/lib/fileaccess/TFileImpl.java   | 28 ++++----
 .../datatorrent/lib/fileaccess/TFileWriter.java |  6 +-
 .../datatorrent/lib/filter/FilterOperator.java  |  4 +-
 .../datatorrent/lib/formatter/Formatter.java    |  6 +-
 .../lib/formatter/JsonFormatter.java            |  2 +-
 .../datatorrent/lib/formatter/XmlFormatter.java | 12 ++--
 .../lib/io/AbstractFTPInputOperator.java        |  2 +-
 .../datatorrent/lib/io/block/BlockWriter.java   |  8 +--
 .../datatorrent/lib/io/block/FSSliceReader.java |  2 +-
 .../lib/io/fs/AbstractFileSplitter.java         |  2 +-
 .../io/fs/AbstractSingleFileOutputOperator.java | 12 ++--
 .../com/datatorrent/lib/io/fs/FileMerger.java   |  2 +-
 .../com/datatorrent/lib/io/fs/FileStitcher.java | 26 ++++----
 .../lib/io/fs/FilterStreamCodec.java            |  4 +-
 .../lib/io/fs/FilterStreamContext.java          |  2 +-
 .../lib/io/fs/FilterStreamProvider.java         | 14 ++--
 .../lib/io/fs/HDFSFileCopyModule.java           | 12 ++--
 .../datatorrent/lib/io/fs/HDFSFileMerger.java   |  6 +-
 .../com/datatorrent/lib/io/fs/Synchronizer.java |  2 +-
 .../AbstractJMSSinglePortOutputOperator.java    |  2 +-
 .../com/datatorrent/lib/io/jms/JMSBase.java     |  8 +--
 .../lib/math/AbstractAggregateCalc.java         |  2 +-
 .../AbstractXmlKeyValueCartesianProduct.java    |  4 +-
 .../java/com/datatorrent/lib/math/Division.java | 38 +++++------
 .../java/com/datatorrent/lib/math/Margin.java   |  2 +-
 .../com/datatorrent/lib/math/MarginMap.java     |  6 +-
 .../main/java/com/datatorrent/lib/math/Min.java |  4 +-
 .../lib/math/MultiplyByConstant.java            | 10 +--
 .../datatorrent/lib/math/RunningAverage.java    |  2 +-
 .../java/com/datatorrent/lib/math/Sigma.java    |  2 +-
 .../math/SingleVariableAbstractCalculus.java    |  2 +-
 .../main/java/com/datatorrent/lib/math/Sum.java |  2 +-
 .../math/XmlKeyValueStringCartesianProduct.java |  4 +-
 .../com/datatorrent/lib/math/package-info.java  |  4 +-
 .../java/com/datatorrent/lib/parser/Parser.java |  6 +-
 .../com/datatorrent/lib/parser/XmlParser.java   |  2 +-
 .../lib/projection/ProjectionOperator.java      |  2 +-
 .../datatorrent/lib/script/ScriptOperator.java  |  8 +--
 .../lib/util/AbstractKeyValueStorageAgent.java  | 20 +++---
 .../lib/util/StorageAgentKeyValueStore.java     | 10 +--
 .../com/datatorrent/lib/util/TableInfo.java     |  4 +-
 .../java/com/datatorrent/lib/util/TopNSort.java |  2 +-
 .../com/datatorrent/lib/util/package-info.java  |  2 +-
 .../malhar/lib/dedup/BoundedDedupOperator.java  | 14 ++--
 .../aggregator/AbstractCompositeAggregator.java | 14 ++--
 .../AbstractCompositeAggregatorFactory.java     |  2 +-
 .../AbstractIncrementalAggregator.java          |  2 +-
 .../aggregator/AbstractTopBottomAggregator.java | 41 ++++++------
 .../aggregator/AggregatorRegistry.java          | 24 +++----
 .../dimensions/aggregator/AggregatorUtils.java  |  2 +-
 .../aggregator/CompositeAggregator.java         | 10 +--
 .../aggregator/CompositeAggregatorFactory.java  |  4 +-
 .../DefaultCompositeAggregatorFactory.java      |  8 +--
 .../aggregator/TopBottomAggregatorFactory.java  | 14 ++--
 .../apex/malhar/lib/fs/FSRecordReader.java      | 10 +--
 .../malhar/lib/fs/FSRecordReaderModule.java     | 22 +++----
 .../managed/IncrementalCheckpointManager.java   |  2 +-
 .../state/spillable/TimeBasedPriorityQueue.java |  6 +-
 .../malhar/lib/wal/FSWindowDataManager.java     | 12 ++--
 .../apex/malhar/lib/wal/FSWindowReplayWAL.java  |  4 +-
 .../apex/malhar/lib/wal/FileSystemWAL.java      |  2 +-
 .../malhar/lib/window/accumulation/Average.java |  8 +--
 .../malhar/lib/window/accumulation/Group.java   |  8 +--
 .../malhar/lib/window/accumulation/Max.java     | 14 ++--
 .../malhar/lib/window/accumulation/Min.java     | 14 ++--
 .../window/accumulation/RemoveDuplicates.java   |  8 +--
 .../lib/window/accumulation/SumDouble.java      |  8 +--
 .../lib/window/accumulation/SumFloat.java       |  8 +--
 .../malhar/lib/window/accumulation/SumInt.java  |  8 +--
 .../malhar/lib/window/accumulation/SumLong.java |  8 +--
 .../apache/hadoop/io/file/tfile/DTBCFile.java   | 68 ++++++++++----------
 .../tfile/ReusableByteArrayInputStream.java     |  8 +--
 .../lib/algo/BottomNUnifierTest.java            |  6 +-
 .../MapToKeyValuePairConverterTest.java         | 14 ++--
 .../StringValueToNumberConverterForMapTest.java | 14 ++--
 .../lib/db/cache/CacheStoreTest.java            |  4 +-
 .../datatorrent/lib/db/jdbc/JdbcIOAppTest.java  |  2 +-
 .../lib/db/jdbc/JdbcOperatorTest.java           |  6 +-
 .../com/datatorrent/lib/filter/FilterTest.java  |  8 +--
 .../lib/formatter/XmlFormatterTest.java         |  2 +-
 .../io/fs/AbstractFileInputOperatorTest.java    |  2 +-
 .../io/fs/AbstractFileOutputOperatorTest.java   | 24 +++----
 .../AbstractSingleFileOutputOperatorTest.java   |  2 +-
 .../lib/io/fs/FastMergerDecisionMakerTest.java  | 12 ++--
 .../lib/io/fs/FileSplitterInputTest.java        | 24 +++----
 .../datatorrent/lib/io/fs/SynchronizerTest.java |  2 +-
 .../lib/io/fs/TailFsInputOperatorTest.java      |  4 +-
 .../logs/FilteredLineToTokenHashMapTest.java    |  2 +-
 .../lib/logs/LineToTokenArrayListTest.java      |  4 +-
 .../lib/logs/LineToTokenHashMapTest.java        |  2 +-
 .../lib/logs/LineTokenizerKeyValTest.java       |  2 +-
 .../MultiWindowDimensionAggregationTest.java    |  2 +-
 .../com/datatorrent/lib/math/MarginMapTest.java |  4 +-
 .../com/datatorrent/lib/math/SigmaTest.java     |  2 +-
 .../lib/multiwindow/SortedMovingWindowTest.java | 32 ++++-----
 .../datatorrent/lib/parser/XmlParserTest.java   |  2 +-
 .../lib/statistics/MeridianOperatorTest.java    |  2 +-
 .../lib/statistics/ModeOperatorTest.java        |  2 +-
 .../StandardDeviationOperatorTest.java          |  4 +-
 .../statistics/WeightedMeanOperatorTest.java    |  2 +-
 .../lib/stream/DevNullCounterTest.java          |  4 +-
 .../com/datatorrent/lib/stream/DevNullTest.java |  2 +-
 .../lib/testbench/ActiveMQMessageGenerator.java |  2 +-
 .../lib/testbench/RandomEventGeneratorTest.java |  2 +-
 .../com/datatorrent/lib/util/TestUtils.java     |  2 +-
 .../lib/dedup/DeduperPartitioningTest.java      |  4 +-
 .../apex/malhar/lib/fs/FSRecordReaderTest.java  |  4 +-
 .../lib/fs/GenericFileOutputOperatorTest.java   |  6 +-
 .../SpillableByteArrayListMultimapImplTest.java |  6 +-
 .../SpillableComplexComponentImplTest.java      |  2 +-
 .../spillable/SpillableSetMultimapImplTest.java |  6 +-
 .../malhar/lib/wal/FSWindowDataManagerTest.java | 14 ++--
 .../lib/window/accumulation/AverageTest.java    |  2 +-
 .../lib/window/accumulation/FoldFnTest.java     | 26 ++++----
 .../lib/window/accumulation/GroupTest.java      |  2 +-
 .../malhar/lib/window/accumulation/MaxTest.java |  6 +-
 .../malhar/lib/window/accumulation/MinTest.java |  6 +-
 .../lib/window/accumulation/ReduceFnTest.java   |  6 +-
 .../accumulation/RemoveDuplicatesTest.java      |  2 +-
 .../malhar/lib/window/accumulation/SumTest.java |  8 +--
 .../lib/window/accumulation/TopNByKeyTest.java  | 26 ++++----
 .../apache/hadoop/io/file/tfile/TestDTFile.java |  4 +-
 .../io/file/tfile/TestDTFileByteArrays.java     |  4 +-
 .../io/file/tfile/TestTFileComparator2.java     |  8 +--
 .../io/file/tfile/TestTFileComparators.java     |  4 +-
 .../TestTFileJClassComparatorByteArrays.java    |  6 +-
 .../file/tfile/TestTFileLzoCodecsStreams.java   |  2 +-
 ...ileNoneCodecsJClassComparatorByteArrays.java |  4 +-
 .../hadoop/io/file/tfile/TestTFileSeek.java     | 22 +++----
 .../file/tfile/TestTFileSeqFileComparison.java  |  2 +-
 .../hadoop/io/file/tfile/TestTFileSplit.java    | 12 ++--
 .../hadoop/io/file/tfile/TestTFileStreams.java  |  6 +-
 .../FunctionOperator/FunctionOperatorTest.java  | 12 ++--
 261 files changed, 1092 insertions(+), 1091 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/apps/logstream/src/main/java/com/datatorrent/apps/logstream/LogstreamWidgetOutputOperator.java
----------------------------------------------------------------------
diff --git a/apps/logstream/src/main/java/com/datatorrent/apps/logstream/LogstreamWidgetOutputOperator.java b/apps/logstream/src/main/java/com/datatorrent/apps/logstream/LogstreamWidgetOutputOperator.java
index 1b94532..29c92e6 100644
--- a/apps/logstream/src/main/java/com/datatorrent/apps/logstream/LogstreamWidgetOutputOperator.java
+++ b/apps/logstream/src/main/java/com/datatorrent/apps/logstream/LogstreamWidgetOutputOperator.java
@@ -128,7 +128,7 @@ public class LogstreamWidgetOutputOperator extends WidgetOutputOperator
     {
       @SuppressWarnings("unchecked")
       HashMap<String, Object>[] result = (HashMap<String, Object>[])Array.newInstance(HashMap.class, topNMap.size());
-      
+
       int j = 0;
       for (Entry<String, Number> e : topNMap.entrySet()) {
         result[j] = new HashMap<String, Object>();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/benchmark/src/main/java/com/datatorrent/benchmark/CouchBaseAppOutput.java
----------------------------------------------------------------------
diff --git a/benchmark/src/main/java/com/datatorrent/benchmark/CouchBaseAppOutput.java b/benchmark/src/main/java/com/datatorrent/benchmark/CouchBaseAppOutput.java
index 1a24984..f789d08 100644
--- a/benchmark/src/main/java/com/datatorrent/benchmark/CouchBaseAppOutput.java
+++ b/benchmark/src/main/java/com/datatorrent/benchmark/CouchBaseAppOutput.java
@@ -29,7 +29,7 @@ import com.datatorrent.api.annotation.ApplicationAnnotation;
  *
  * Application to benchmark the performance of couchbase output operator.
  * The number of tuples processed per second were around 20,000.
- * 
+ *
  * @since 2.0.0
  */
 @ApplicationAnnotation(name = "CouchBaseAppOutput")

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/benchmark/src/main/java/com/datatorrent/benchmark/kafka/BenchmarkPartitionableKafkaOutputOperator.java
----------------------------------------------------------------------
diff --git a/benchmark/src/main/java/com/datatorrent/benchmark/kafka/BenchmarkPartitionableKafkaOutputOperator.java b/benchmark/src/main/java/com/datatorrent/benchmark/kafka/BenchmarkPartitionableKafkaOutputOperator.java
index c60e99a..1126ac1 100644
--- a/benchmark/src/main/java/com/datatorrent/benchmark/kafka/BenchmarkPartitionableKafkaOutputOperator.java
+++ b/benchmark/src/main/java/com/datatorrent/benchmark/kafka/BenchmarkPartitionableKafkaOutputOperator.java
@@ -55,7 +55,7 @@ public class BenchmarkPartitionableKafkaOutputOperator implements Partitioner<Be
 {
 
   private static final Logger logger = LoggerFactory.getLogger(BenchmarkPartitionableKafkaOutputOperator.class);
-  
+
   private String topic = "benchmark";
 
   @Min(1)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/benchmark/src/main/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkApp.java
----------------------------------------------------------------------
diff --git a/benchmark/src/main/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkApp.java b/benchmark/src/main/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkApp.java
index 7d9c3ba..eab02db 100644
--- a/benchmark/src/main/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkApp.java
+++ b/benchmark/src/main/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkApp.java
@@ -53,7 +53,7 @@ public class ManagedStateBenchmarkApp implements StreamingApplication
 
   protected StoreOperator storeOperator;
   protected int timeRange = 1000 * 60; // one minute range of hot keys
-  
+
   @Override
   public void populateDAG(DAG dag, Configuration conf)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/benchmark/src/main/java/com/datatorrent/benchmark/state/StoreOperator.java
----------------------------------------------------------------------
diff --git a/benchmark/src/main/java/com/datatorrent/benchmark/state/StoreOperator.java b/benchmark/src/main/java/com/datatorrent/benchmark/state/StoreOperator.java
index 3298543..2748c29 100644
--- a/benchmark/src/main/java/com/datatorrent/benchmark/state/StoreOperator.java
+++ b/benchmark/src/main/java/com/datatorrent/benchmark/state/StoreOperator.java
@@ -48,7 +48,7 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
     UPDATESYNC,
     UPDATEASYNC
   }
-  
+
   protected static final int numOfWindowPerStatistics = 10;
 
   //this is the store we are going to use
@@ -60,10 +60,10 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
   protected long tupleCount = 0;
   protected int windowCountPerStatistics = 0;
   protected long statisticsBeginTime = 0;
-  
+
   protected ExecMode execMode = ExecMode.INSERT;
   protected int timeRange = 1000 * 60;
-  
+
   public final transient DefaultInputPort<KeyValPair<byte[], byte[]>> input = new DefaultInputPort<KeyValPair<byte[], byte[]>>()
   {
     @Override
@@ -102,9 +102,9 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
 
   protected transient Queue<Future<Slice>> taskQueue = new LinkedList<Future<Slice>>();
   protected transient Map<Future<Slice>, KeyValPair<byte[], byte[]>> taskToPair = Maps.newHashMap();
-  
+
   /**
-   * we verify 3 type of operation 
+   * we verify 3 type of operation
    * @param tuple
    */
   protected void processTuple(KeyValPair<byte[], byte[]> tuple)
@@ -119,21 +119,21 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
         store.getSync(getTimeByKey(tuple.getKey()), new Slice(tuple.getKey()));
         insertValueToStore(tuple);
         break;
-        
+
       default: //insert
         insertValueToStore(tuple);
     }
   }
-  
+
   protected long getTimeByKey(byte[] key)
   {
     long lKey = ByteBuffer.wrap(key).getLong();
     return lKey - (lKey % timeRange);
   }
-  
+
   // give a barrier to avoid used up memory
   protected final int taskBarrier = 100000;
-  
+
   /**
    * This method first send request of get to the state manager, then handle all the task(get) which already done and update the value.
    * @param tuple
@@ -143,14 +143,14 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
     if (taskQueue.size() > taskBarrier) {
       //slow down to avoid too much task waiting.
       try {
-        
+
         logger.info("Queue Size: {}, wait time(milli-seconds): {}", taskQueue.size(), taskQueue.size() / taskBarrier);
         Thread.sleep(taskQueue.size() / taskBarrier);
       } catch (Exception e) {
         //ignore
       }
     }
-    
+
     //send request of get to the state manager and add to the taskQueue and taskToPair.
     //the reason of an extra taskQueue to make sure the tasks are ordered
     {
@@ -171,7 +171,7 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
       insertValueToStore(taskToPair.remove(task));
     }
   }
-  
+
   protected void insertValueToStore(KeyValPair<byte[], byte[]> tuple)
   {
     Slice key = new Slice(tuple.getKey());
@@ -232,7 +232,7 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
   {
     return execMode.name();
   }
-  
+
   public void setExecModeStr(String execModeStr)
   {
     //this method used for set configuration. so, use case-insensitive
@@ -252,5 +252,5 @@ public class StoreOperator extends BaseOperator implements Operator.CheckpointNo
   {
     this.timeRange = timeRange;
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/benchmark/src/test/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkAppTester.java
----------------------------------------------------------------------
diff --git a/benchmark/src/test/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkAppTester.java b/benchmark/src/test/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkAppTester.java
index 93a7720..4435aad 100644
--- a/benchmark/src/test/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkAppTester.java
+++ b/benchmark/src/test/java/com/datatorrent/benchmark/state/ManagedStateBenchmarkAppTester.java
@@ -39,31 +39,31 @@ import com.datatorrent.benchmark.state.StoreOperator.ExecMode;
 public class ManagedStateBenchmarkAppTester extends ManagedStateBenchmarkApp
 {
   public static final String basePath = "target/temp";
-  
+
   @Before
   public void before()
   {
     FileUtil.fullyDelete(new File(basePath));
   }
-  
+
   @Test
   public void testUpdateSync() throws Exception
   {
     test(ExecMode.UPDATESYNC);
   }
-  
+
   @Test
   public void testUpdateAsync() throws Exception
   {
     test(ExecMode.UPDATEASYNC);
   }
-  
+
   @Test
   public void testInsert() throws Exception
   {
     test(ExecMode.INSERT);
   }
-  
+
   public void test(ExecMode exeMode) throws Exception
   {
     Configuration conf = new Configuration(false);
@@ -73,7 +73,7 @@ public class ManagedStateBenchmarkAppTester extends ManagedStateBenchmarkApp
 
     super.populateDAG(dag, conf);
     storeOperator.execMode = exeMode;
-    
+
     StreamingApplication app = new StreamingApplication()
     {
       @Override
@@ -92,7 +92,7 @@ public class ManagedStateBenchmarkAppTester extends ManagedStateBenchmarkApp
   }
 
 
-  
+
   @Override
   public String getStoreBasePath(Configuration conf)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/avro/AvroFileInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/avro/AvroFileInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/avro/AvroFileInputOperator.java
index b03e31a..01e99d3 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/avro/AvroFileInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/avro/AvroFileInputOperator.java
@@ -50,7 +50,7 @@ import com.datatorrent.lib.io.fs.AbstractFileInputOperator;
  * input file<br>
  * Users can add the {@link FSWindowDataManager}
  * to ensure exactly once semantics with a HDFS backed WAL.
- * 
+ *
  * @displayName AvroFileInputOperator
  * @category Input
  * @tags fs, file,avro, input operator
@@ -81,7 +81,7 @@ public class AvroFileInputOperator extends AbstractFileInputOperator<GenericReco
 
   /**
    * Returns a input stream given a file path
-   * 
+   *
    * @param path
    * @return InputStream
    * @throws IOException
@@ -101,7 +101,7 @@ public class AvroFileInputOperator extends AbstractFileInputOperator<GenericReco
   /**
    * Reads a GenericRecord from the given input stream<br>
    * Emits the FileName,Offset,Exception on the error port if its connected
-   * 
+   *
    * @return GenericRecord
    */
   @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/avro/AvroToPojo.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/avro/AvroToPojo.java b/contrib/src/main/java/com/datatorrent/contrib/avro/AvroToPojo.java
index ad54491..1951c1e 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/avro/AvroToPojo.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/avro/AvroToPojo.java
@@ -140,7 +140,7 @@ public class AvroToPojo extends BaseOperator
 
   /**
    * Returns a POJO from a Generic Record
-   * 
+   *
    * @return Object
    */
   @SuppressWarnings("unchecked")
@@ -220,7 +220,7 @@ public class AvroToPojo extends BaseOperator
   /**
    * Use reflection to generate field info values if the user has not provided
    * the inputs mapping
-   * 
+   *
    * @return String representing the POJO field to Avro field mapping
    */
   private String generateFieldInfoInputs(Class<?> cls)
@@ -240,7 +240,7 @@ public class AvroToPojo extends BaseOperator
   /**
    * Creates a map representing fieldName in POJO:field in Generic Record:Data
    * type
-   * 
+   *
    * @return List of FieldInfo
    */
   private List<FieldInfo> createFieldInfoMap(String str)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/avro/PojoToAvro.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/avro/PojoToAvro.java b/contrib/src/main/java/com/datatorrent/contrib/avro/PojoToAvro.java
index 5fd7ee2..2f8fb19 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/avro/PojoToAvro.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/avro/PojoToAvro.java
@@ -97,7 +97,7 @@ public class PojoToAvro extends BaseOperator
 
   /**
    * Returns the schema string for Avro Generic Record
-   * 
+   *
    * @return schemaString
    */
   public String getSchemaString()
@@ -115,7 +115,7 @@ public class PojoToAvro extends BaseOperator
 
   /**
    * Returns the schema object
-   * 
+   *
    * @return schema
    */
   private Schema getSchema()
@@ -133,7 +133,7 @@ public class PojoToAvro extends BaseOperator
 
   /**
    * Returns the list for field names from provided Avro schema
-   * 
+   *
    * @return List of Fields
    */
   private List<Field> getColumnNames()
@@ -151,7 +151,7 @@ public class PojoToAvro extends BaseOperator
 
   /**
    * This method generates the getters for provided field of a given class
-   * 
+   *
    * @return Getter
    */
   private Getter<?, ?> generateGettersForField(Class<?> cls, String inputFieldName)
@@ -232,7 +232,7 @@ public class PojoToAvro extends BaseOperator
 
   /**
    * Returns a generic record mapping the POJO fields to provided schema
-   * 
+   *
    * @return Generic Record
    */
   private GenericRecord getGenericRecord(Object tuple) throws Exception

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/AbstractElasticSearchOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/AbstractElasticSearchOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/AbstractElasticSearchOutputOperator.java
index 7753108..0282ae8 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/AbstractElasticSearchOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/AbstractElasticSearchOutputOperator.java
@@ -57,7 +57,7 @@ import com.datatorrent.lib.db.AbstractStoreOutputOperator;
  * @displayName Elastic Search Output
  * @category Output
  * @tags elastic search
- * 
+ *
  * @since 2.1.0
  */
 public abstract class AbstractElasticSearchOutputOperator<T, S extends ElasticSearchConnectable> extends AbstractStoreOutputOperator<T, S>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchConnectable.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchConnectable.java b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchConnectable.java
index fdf4e62..34eca95 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchConnectable.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchConnectable.java
@@ -90,7 +90,7 @@ public class ElasticSearchConnectable implements Connectable
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.lib.db.Connectable#connect()
    */
   @Override
@@ -102,7 +102,7 @@ public class ElasticSearchConnectable implements Connectable
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.lib.db.Connectable#disconnect()
    */
   @Override
@@ -115,7 +115,7 @@ public class ElasticSearchConnectable implements Connectable
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.lib.db.Connectable#isConnected()
    */
   @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapInputOperator.java
index 024b098..dcbee9d 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapInputOperator.java
@@ -40,7 +40,7 @@ public abstract class ElasticSearchMapInputOperator<T extends Map<String, Object
   protected String type;
 
   /**
-   * 
+   *
    */
   public ElasticSearchMapInputOperator()
   {
@@ -49,7 +49,7 @@ public abstract class ElasticSearchMapInputOperator<T extends Map<String, Object
 
   /**
    * {@link SearchRequestBuilder} properties which do not change for each window are set during operator initialization.
-   * 
+   *
    * @see com.datatorrent.contrib.elasticsearch.AbstractElasticSearchInputOperator#setup(com.datatorrent.api.Context.OperatorContext)
    */
   @Override
@@ -61,7 +61,7 @@ public abstract class ElasticSearchMapInputOperator<T extends Map<String, Object
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see
    * com.datatorrent.contrib.elasticsearch.AbstractElasticSearchInputOperator#convertToTuple(org.elasticsearch.search
    * .SearchHit)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapOutputOperator.java
index 51a4688..8616938 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchMapOutputOperator.java
@@ -38,7 +38,7 @@ public class ElasticSearchMapOutputOperator<T extends Map<String, Object>> exten
   private String type;
 
   /**
-   * 
+   *
    */
   public ElasticSearchMapOutputOperator()
   {
@@ -48,7 +48,7 @@ public class ElasticSearchMapOutputOperator<T extends Map<String, Object>> exten
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see
    * com.datatorrent.contrib.elasticsearch.AbstractElasticSearchOutputOperator#setSource(org.elasticsearch.action.index
    * .IndexRequestBuilder, java.lang.Object)
@@ -61,7 +61,7 @@ public class ElasticSearchMapOutputOperator<T extends Map<String, Object>> exten
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.contrib.elasticsearch.AbstractElasticSearchOutputOperator#getId(java.lang.Object)
    */
   @Override
@@ -103,7 +103,7 @@ public class ElasticSearchMapOutputOperator<T extends Map<String, Object>> exten
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.contrib.elasticsearch.AbstractElasticSearchOutputOperator#getIndexName(java.lang.Object)
    */
   @Override
@@ -128,5 +128,5 @@ public class ElasticSearchMapOutputOperator<T extends Map<String, Object>> exten
   protected String getType(T tuple)
   {
     return type;
-  }  
+  }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolatorStore.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolatorStore.java b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolatorStore.java
index 7d88336..c13c025 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolatorStore.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolatorStore.java
@@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder;
 import com.datatorrent.netlet.util.DTThrowable;
 
 /**
- * 
+ *
  * @since 2.1.0
  */
 public class ElasticSearchPercolatorStore extends ElasticSearchConnectable
@@ -44,7 +44,7 @@ public class ElasticSearchPercolatorStore extends ElasticSearchConnectable
   public void registerPercolateQuery(String indexName, String queryName, QueryBuilder queryBuilder)
   {
     try {
-      
+
       client.prepareIndex(indexName, PERCOLATOR_TYPE, queryName)
         .setSource(XContentFactory.jsonBuilder()
             .startObject()
@@ -52,22 +52,22 @@ public class ElasticSearchPercolatorStore extends ElasticSearchConnectable
             .endObject())
         .setRefresh(true)
         .execute().actionGet();
-      
+
     } catch (IOException e) {
       DTThrowable.rethrow(e);
     }
   }
-  
+
   public PercolateResponse percolate(String[] indexNames, String documentType, Object tuple){
     XContentBuilder docBuilder;
     try {
-      
+
       docBuilder = XContentFactory.jsonBuilder().startObject();
       docBuilder.field("doc").startObject(); //This is needed to designate the document
       docBuilder.field("content", tuple);
       docBuilder.endObject(); //End of the doc field
       docBuilder.endObject();//End of the JSON root object
-      
+
       return client.preparePercolate().setIndices(indexNames)
           .setDocumentType(documentType)
           .setSource(docBuilder)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/enrich/DelimitedFSLoader.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/enrich/DelimitedFSLoader.java b/contrib/src/main/java/com/datatorrent/contrib/enrich/DelimitedFSLoader.java
index 9fa7129..3121cf1 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/enrich/DelimitedFSLoader.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/enrich/DelimitedFSLoader.java
@@ -146,7 +146,7 @@ public class DelimitedFSLoader extends FSLoader
 
   /**
    * Get the schema
-   * 
+   *
    * @return
    */
   public String getSchema()
@@ -156,7 +156,7 @@ public class DelimitedFSLoader extends FSLoader
 
   /**
    * Set the schema
-   * 
+   *
    * @param schema
    */
   public void setSchema(String schema)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/enrich/FSLoader.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/enrich/FSLoader.java b/contrib/src/main/java/com/datatorrent/contrib/enrich/FSLoader.java
index 997243d..464fa99 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/enrich/FSLoader.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/enrich/FSLoader.java
@@ -117,7 +117,7 @@ public abstract class FSLoader extends ReadOnlyBackup
    * a record. Concrete implementations override this method to parse a record
    * and convert it to Map of field names and values OR simply returns null to
    * skip the records.
-   * 
+   *
    * @param line
    *          A single record from file
    * @return a map with field name and value. Null value if returned is ignored

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/enrich/FixedWidthFSLoader.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/enrich/FixedWidthFSLoader.java b/contrib/src/main/java/com/datatorrent/contrib/enrich/FixedWidthFSLoader.java
index 8b7eac0..d37ce3e 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/enrich/FixedWidthFSLoader.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/enrich/FixedWidthFSLoader.java
@@ -94,7 +94,7 @@ public class FixedWidthFSLoader extends FSLoader
 
   /**
    * Set to true if file has header
-   * 
+   *
    * @param hasHeader
    *          Indicates whether first line of the file is a header. Default is
    *          false
@@ -106,7 +106,7 @@ public class FixedWidthFSLoader extends FSLoader
 
   /**
    * Gets the field description
-   * 
+   *
    * @return fieldDescription. String specifying information related to fields
    *         in fixed-width file.
    */
@@ -117,7 +117,7 @@ public class FixedWidthFSLoader extends FSLoader
 
   /**
    * Sets fieldDescription
-   * 
+   *
    * @param fieldDescription
    *          a String specifying information related to fields in fixed-width
    *          file. Format is [NAME]:[FIELD_TYPE]:[WIDTH]:[date format if
@@ -135,7 +135,7 @@ public class FixedWidthFSLoader extends FSLoader
   /**
    * Gets the character used for padding in the fixed-width file.Default is
    * white space (' ')
-   * 
+   *
    * @return Padding character. Default is white space.
    */
   public char getPadding()
@@ -146,7 +146,7 @@ public class FixedWidthFSLoader extends FSLoader
   /**
    * Sets the character used for padding in fixed-width file.Default is white
    * space (' ')
-   * 
+   *
    * @param padding
    *          Padding character. Default is white space.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/formatter/CsvFormatter.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/formatter/CsvFormatter.java b/contrib/src/main/java/com/datatorrent/contrib/formatter/CsvFormatter.java
index 34ba49c..2979b44 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/formatter/CsvFormatter.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/formatter/CsvFormatter.java
@@ -58,7 +58,7 @@ import com.datatorrent.netlet.util.DTThrowable;
  * <b>in</b>:input tuple as a POJO. Each tuple represents a record<br>
  * <b>out</b>:tuples are are converted to string are emitted on this port<br>
  * <b>err</b>:tuples that could not be converted are emitted on this port<br>
- * 
+ *
  * @displayName CsvFormatter
  * @category Formatter
  * @tags pojo csv formatter
@@ -180,7 +180,7 @@ public class CsvFormatter extends Formatter<String>
 
   /**
    * Get the schema
-   * 
+   *
    * @return schema
    */
   public String getSchema()
@@ -190,7 +190,7 @@ public class CsvFormatter extends Formatter<String>
 
   /**
    * Set the schema
-   * 
+   *
    * @param schema
    */
   public void setSchema(String schema)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeInputOperator.java
index bc8c1f0..497e6e4 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeInputOperator.java
@@ -25,14 +25,14 @@ import com.datatorrent.lib.db.AbstractKeyValueStoreInputOperator;
  * concrete operator should be created from this skeleton implementation.
  * <p>
  * </p>
- * 
+ *
  * @displayName Abstract Geode Input
  * @category Input
  * @tags geode, key value
  *
  * @param <T>
  *          The tuple type.
- * 
+ *
  *
  * @since 3.4.0
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeOutputOperator.java
index 7b0e158..dd0bad2 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/AbstractGeodeOutputOperator.java
@@ -25,14 +25,14 @@ import com.datatorrent.lib.db.AbstractStoreOutputOperator;
  * operator should be created from this skeleton implementation.
  * <p>
  * </p>
- * 
+ *
  * @displayName Abstract Geode Output
  * @category Output
  * @tags geode, key value
  *
  * @param <T>
  *          The tuple type.
- * 
+ *
  *
  * @since 3.4.0
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeCheckpointStore.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeCheckpointStore.java b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeCheckpointStore.java
index edd07d9..2152b97 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeCheckpointStore.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeCheckpointStore.java
@@ -47,7 +47,7 @@ import java.util.Map.Entry;
 /**
  * Geode Store implementation of {@link StorageAgentKeyValueStore} Uses {@link Kryo}
  * serialization to store retrieve objects
- * 
+ *
  *
  *
  * @since 3.4.0
@@ -56,7 +56,7 @@ public class GeodeCheckpointStore
     implements StorageAgentKeyValueStore, Serializable
 {
 
-  public static final String GET_KEYS_QUERY = 
+  public static final String GET_KEYS_QUERY =
       "SELECT entry.key FROM /$[region}.entries entry WHERE entry.key LIKE '${operator.id}%'";
 
   private String geodeLocators;
@@ -82,7 +82,7 @@ public class GeodeCheckpointStore
 
   /**
    * Initializes Geode store by using locator connection string
-   * 
+   *
    * @param locatorString
    */
   public GeodeCheckpointStore(String locatorString)
@@ -101,7 +101,7 @@ public class GeodeCheckpointStore
 
   /**
    * Get the Geode locator connection string
-   * 
+   *
    * @return locator connection string
    */
   public String getGeodeLocators()
@@ -111,7 +111,7 @@ public class GeodeCheckpointStore
 
   /**
    * Sets the Geode locator string
-   * 
+   *
    * @param geodeLocators
    */
   public void setGeodeLocators(String geodeLocators)
@@ -160,7 +160,7 @@ public class GeodeCheckpointStore
 
   /**
    * Creates a region
-   * 
+   *
    */
   public synchronized void createRegion()
   {
@@ -185,7 +185,7 @@ public class GeodeCheckpointStore
 
   /**
    * Check if store is connected to configured Geode cluster or not
-   * 
+   *
    * @return True is connected to Geode cluster and client cache is active
    */
   @Override
@@ -199,7 +199,7 @@ public class GeodeCheckpointStore
 
   /**
    * Return the value for specified key from Geode region
-   * 
+   *
    * @return the value object
    */
   @Override
@@ -252,7 +252,7 @@ public class GeodeCheckpointStore
 
   /**
    * Get list for keys starting from provided key name
-   * 
+   *
    * @return List of keys
    */
   @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeKeyValueStorageAgent.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeKeyValueStorageAgent.java b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeKeyValueStorageAgent.java
index fdfd4ce..691c2c1 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeKeyValueStorageAgent.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeKeyValueStorageAgent.java
@@ -27,7 +27,7 @@ import com.datatorrent.lib.util.AbstractKeyValueStorageAgent;
 /**
  * Storage Agent implementation which uses {@link GeodeCheckpointStore} for operator
  * checkpointing
- * 
+ *
  *
  *
  * @since 3.4.0

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/GeodePOJOOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodePOJOOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodePOJOOutputOperator.java
index defaa54..c7d22c7 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodePOJOOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodePOJOOutputOperator.java
@@ -30,7 +30,7 @@ import com.datatorrent.lib.util.TableInfo;
  * @displayName Geode Output Operator
  * @category Output
  * @tags pojo, geode
- * 
+ *
  *
  * @since 3.4.0
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeStore.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeStore.java b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeStore.java
index bdb7add..d345661 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeStore.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/GeodeStore.java
@@ -51,14 +51,14 @@ import com.datatorrent.lib.db.KeyValueStore;
  *  that provides reliable asynchronous event notifications and guaranteed message delivery.
  * Geode is a data management platform that provides real-time
  * , consistent access to data-intensive applications.
- * 
+ *
  *
  * @since 3.4.0
  */
 public class GeodeStore implements KeyValueStore, Serializable
 {
   /**
-   * 
+   *
    */
   private static final long serialVersionUID = -5076452548893319967L;
   private static final Logger logger = LoggerFactory.getLogger(GeodeStore.class);
@@ -198,7 +198,7 @@ public class GeodeStore implements KeyValueStore, Serializable
     try {
       return (getRegion().get(key));
     } catch (IOException ex) {
-      throw new RuntimeException("Exception while getting the object", ex);      
+      throw new RuntimeException("Exception while getting the object", ex);
 
     }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/geode/RegionCreateFunction.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/geode/RegionCreateFunction.java b/contrib/src/main/java/com/datatorrent/contrib/geode/RegionCreateFunction.java
index bc808ad..9e948c4 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/geode/RegionCreateFunction.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/geode/RegionCreateFunction.java
@@ -33,7 +33,7 @@ import com.gemstone.gemfire.cache.execute.FunctionContext;
 
 /**
  * Function to create region dynamically through client API
- * 
+ *
  *
  * @since 3.4.0
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/hbase/HBaseFieldInfo.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/hbase/HBaseFieldInfo.java b/contrib/src/main/java/com/datatorrent/contrib/hbase/HBaseFieldInfo.java
index a43b893..6a34a91 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/hbase/HBaseFieldInfo.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/hbase/HBaseFieldInfo.java
@@ -29,11 +29,11 @@ import com.datatorrent.lib.util.FieldInfo;
 public class HBaseFieldInfo extends FieldInfo
 {
 	private String familyName;
-	
+
 	public HBaseFieldInfo()
 	{
 	}
-	
+
 	public HBaseFieldInfo( String columnName, String columnExpression, SupportType type, String familyName )
 	{
 	  super( columnName, columnExpression, type );
@@ -49,7 +49,7 @@ public class HBaseFieldInfo extends FieldInfo
 	{
 		this.familyName = familyName;
 	}
-	
+
 	public byte[] toBytes( Object value )
 	{
 		final SupportType type = getType();
@@ -57,28 +57,28 @@ public class HBaseFieldInfo extends FieldInfo
 		{
 		case BOOLEAN:
 		  return Bytes.toBytes( (Boolean)value );
-		  
+
 		case SHORT:
 		  return Bytes.toBytes( (Short)value );
-		  
+
 		case INTEGER:
 		  return Bytes.toBytes( (Integer)value );
-		  
+
 		case LONG:
 		  return Bytes.toBytes( (Long)value );
-		  
+
 		case FLOAT:
 		  return Bytes.toBytes( (Float)value );
-		  
+
 		case DOUBLE:
 		  return Bytes.toBytes( (Double)value );
-		  
+
 		case STRING:
 		  return Bytes.toBytes( (String)value );
 		}
 		throw new IllegalArgumentException( "Unsupported type: " + type );
 	}
-	
+
 	public Object toValue( byte[] bytes )
 	{
     final SupportType type = getType();
@@ -86,26 +86,26 @@ public class HBaseFieldInfo extends FieldInfo
     {
     case BOOLEAN:
       return Bytes.toBoolean( bytes );
-      
+
     case SHORT:
       return Bytes.toShort( bytes );
-      
+
     case INTEGER:
       return Bytes.toInt( bytes );
-      
+
     case LONG:
       return Bytes.toLong( bytes );
-      
+
     case FLOAT:
       return Bytes.toFloat( bytes );
-      
+
     case DOUBLE:
       return Bytes.toDouble( bytes );
-      
+
     case STRING:
       return Bytes.toString( bytes );
     }
     throw new IllegalArgumentException( "Unsupported type: " + type );
   }
-	
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/kafka/AbstractKafkaInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/kafka/AbstractKafkaInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/kafka/AbstractKafkaInputOperator.java
index fc11bf7..1218f4a 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/kafka/AbstractKafkaInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/kafka/AbstractKafkaInputOperator.java
@@ -202,7 +202,7 @@ public abstract class AbstractKafkaInputOperator<K extends KafkaConsumer> implem
   protected abstract void emitTuple(Message message);
 
   /**
-   * Concrete class derived from KafkaInputOpertor should implement this method if it wants to access kafka offset and partitionId along with kafka message. 
+   * Concrete class derived from KafkaInputOpertor should implement this method if it wants to access kafka offset and partitionId along with kafka message.
    */
   protected void emitTuple(KafkaConsumer.KafkaMessage message)
   {
@@ -524,7 +524,7 @@ public abstract class AbstractKafkaInputOperator<K extends KafkaConsumer> implem
     Set<Integer> deletedOperators = Sets.newHashSet();
     Collection<Partition<AbstractKafkaInputOperator<K>>> resultPartitions = partitions;
     boolean numPartitionsChanged = false;
-    
+
     switch (strategy) {
 
     // For the 1 to 1 mapping The framework will create number of operator partitions based on kafka topic partitions
@@ -617,7 +617,7 @@ public abstract class AbstractKafkaInputOperator<K extends KafkaConsumer> implem
     default:
       break;
     }
-  
+
     if (numPartitionsChanged) {
       List<WindowDataManager> managers = windowDataManager.partition(resultPartitions.size(), deletedOperators);
       int i = 0;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/kafka/HighlevelKafkaConsumer.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/kafka/HighlevelKafkaConsumer.java b/contrib/src/main/java/com/datatorrent/contrib/kafka/HighlevelKafkaConsumer.java
index 5b9c5ed..85cee56 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/kafka/HighlevelKafkaConsumer.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/kafka/HighlevelKafkaConsumer.java
@@ -123,11 +123,11 @@ public class HighlevelKafkaConsumer extends KafkaConsumer
       Properties config = new Properties();
       config.putAll(consumerConfig);
       config.setProperty("zookeeper.connect", zookeeperMap.get(cluster).iterator().next());
-      // create consumer connector will start a daemon thread to monitor the metadata change 
-      // we want to start this thread until the operator is activated 
+      // create consumer connector will start a daemon thread to monitor the metadata change
+      // we want to start this thread until the operator is activated
       standardConsumer.put(cluster, kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(config)));
     }
-    
+
     Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
 
     if (numStream == null || numStream.size() == 0) {
@@ -232,5 +232,5 @@ public class HighlevelKafkaConsumer extends KafkaConsumer
     // offset is not useful for high-level kafka consumer
     throw new UnsupportedOperationException("Offset request is currently not supported for high-level consumer");
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/kafka/KafkaPartition.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/kafka/KafkaPartition.java b/contrib/src/main/java/com/datatorrent/contrib/kafka/KafkaPartition.java
index a86a205..9954eb3 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/kafka/KafkaPartition.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/kafka/KafkaPartition.java
@@ -26,7 +26,7 @@ import java.io.Serializable;
 public class KafkaPartition implements Serializable
 {
   protected static final String DEFAULT_CLUSTERID = "com.datatorrent.contrib.kafka.defaultcluster";
-  
+
   @SuppressWarnings("unused")
   private KafkaPartition()
   {
@@ -46,15 +46,15 @@ public class KafkaPartition implements Serializable
   }
 
   /**
-   * 
+   *
    */
   private static final long serialVersionUID = 7556802229202221546L;
-  
+
 
   private String clusterId;
-  
+
   private int partitionId;
-  
+
   private String topic;
 
   public String getClusterId()
@@ -128,7 +128,7 @@ public class KafkaPartition implements Serializable
   {
     return "KafkaPartition [clusterId=" + clusterId + ", partitionId=" + partitionId + ", topic=" + topic + "]";
   }
-  
-  
-  
+
+
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/kafka/OffsetManager.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/kafka/OffsetManager.java b/contrib/src/main/java/com/datatorrent/contrib/kafka/OffsetManager.java
index 5eb0575..0dee11e 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/kafka/OffsetManager.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/kafka/OffsetManager.java
@@ -33,11 +33,11 @@ public interface OffsetManager
 //
 
   /**
-   * 
+   *
    * Load initial offsets for all kafka partition
    * <br>
    * The method is called at the first attempt of creating partitions and the return value is used as initial offset for simple consumer
-   * 
+   *
    * @return Map of Kafka KafkaPartition as key and long offset as value
    */
   public Map<KafkaPartition, Long> loadInitialOffsets();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/kafka/SimpleKafkaConsumer.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/kafka/SimpleKafkaConsumer.java b/contrib/src/main/java/com/datatorrent/contrib/kafka/SimpleKafkaConsumer.java
index fb89389..4db1d69 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/kafka/SimpleKafkaConsumer.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/kafka/SimpleKafkaConsumer.java
@@ -69,7 +69,7 @@ import kafka.message.MessageAndOffset;
  * <br>
  *
  * Load balance: <br>
- * <li>The consumer create several data-consuming threads to consume the data from broker(s)</li> 
+ * <li>The consumer create several data-consuming threads to consume the data from broker(s)</li>
  * <li>Each thread has only ONE kafka client connecting to ONE broker to consume data from for multiple partitions </li>
  * <li>
  * There is ONE separate thread to monitor the leadership for all the partitions of the topic at every
@@ -89,7 +89,7 @@ public class SimpleKafkaConsumer extends KafkaConsumer
 {
 
   /**
-   * The data-consuming thread that use one simple kafka client to connect to one broker which is the leader of the partition(s) that this consumer is interested 
+   * The data-consuming thread that use one simple kafka client to connect to one broker which is the leader of the partition(s) that this consumer is interested
    */
   static final class ConsumerThread implements Runnable
   {
@@ -161,7 +161,7 @@ public class SimpleKafkaConsumer extends KafkaConsumer
               KafkaPartition kafkaPartition = iterator.next();
               short errorCode = fetchResponse.errorCode(consumer.topic, kafkaPartition.getPartitionId());
               if (fetchResponse.hasError() && errorCode != ErrorMapping.NoError()) {
-                // Kick off partition(s) which has error when fetch from this broker temporarily 
+                // Kick off partition(s) which has error when fetch from this broker temporarily
                 // Monitor will find out which broker it goes in monitor thread
                 logger.warn("Error when consuming topic {} from broker {} with error {} ", kafkaPartition, broker,
                   ErrorMapping.exceptionFor(errorCode));
@@ -177,7 +177,7 @@ public class SimpleKafkaConsumer extends KafkaConsumer
                 consumer.partitionToBroker.remove(kafkaPartition);
                 consumer.stats.updatePartitionStats(kafkaPartition, -1, "");
                 continue;
-              } 
+              }
               // If the fetchResponse either has no error or the no error for $kafkaPartition get the data
               long offset = -1l;
               for (MessageAndOffset msg : fetchResponse.messageSet(consumer.topic, kafkaPartition.getPartitionId())) {
@@ -200,7 +200,7 @@ public class SimpleKafkaConsumer extends KafkaConsumer
           // Update consumer that these partitions are currently stop being consumed because of some unrecoverable exception
           consumer.partitionToBroker.remove(kpForConsumer);
         }
-        
+
         logger.info("Exit the consumer thread for broker {} ", broker);
       }
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/kinesis/AbstractKinesisOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/kinesis/AbstractKinesisOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/kinesis/AbstractKinesisOutputOperator.java
index 7be453a..43fc62a 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/kinesis/AbstractKinesisOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/kinesis/AbstractKinesisOutputOperator.java
@@ -69,14 +69,14 @@ public abstract class AbstractKinesisOutputOperator<V, T> implements Operator
    * @return
    */
   protected abstract byte[] getRecord(V value);
-  
+
   /**
    * convert tuple to pair of key and value. the key will be used as PartitionKey, and the value used as Data
    * @param tuple
    * @return
    */
   protected abstract Pair<String, V> tupleToKeyValue(T tuple);
-  
+
   List<PutRecordsRequestEntry> putRecordsRequestEntryList = new ArrayList<PutRecordsRequestEntry>();
   // Max size of each record: 50KB, Max size of putRecords: 4.5MB
   // So, default capacity would be 4.5MB/50KB = 92
@@ -145,7 +145,7 @@ public abstract class AbstractKinesisOutputOperator<V, T> implements Operator
     {
       processTuple( tuple );
     }
-    
+
   };
 
   public void processTuple(T tuple)
@@ -169,15 +169,15 @@ public abstract class AbstractKinesisOutputOperator<V, T> implements Operator
         requestRecord.setData(ByteBuffer.wrap(getRecord(keyValue.second)));
 
         client.putRecord(requestRecord);
-        
+
       }
       sendCount++;
     } catch (AmazonClientException e) {
       throw new RuntimeException(e);
     }
   }
-  
-  
+
+
   private void addRecord(T tuple)
   {
     try {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/memcache/MemcacheStore.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/memcache/MemcacheStore.java b/contrib/src/main/java/com/datatorrent/contrib/memcache/MemcacheStore.java
index 973f1ee..1465f03 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/memcache/MemcacheStore.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/memcache/MemcacheStore.java
@@ -52,7 +52,7 @@ public class MemcacheStore implements KeyValueStore
   {
     serverAddresses.add(addr);
   }
-  
+
 
   public List<InetSocketAddress> getServerAddresses()
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/mqtt/MqttClientConfig.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/mqtt/MqttClientConfig.java b/contrib/src/main/java/com/datatorrent/contrib/mqtt/MqttClientConfig.java
index ee16786..e8f52df 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/mqtt/MqttClientConfig.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/mqtt/MqttClientConfig.java
@@ -294,7 +294,7 @@ public class MqttClientConfig
 
   /**
    * Sets the port
-   * 
+   *
    * @param port the port
    */
   public void setPort(int port)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/parquet/AbstractParquetFileReader.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/parquet/AbstractParquetFileReader.java b/contrib/src/main/java/com/datatorrent/contrib/parquet/AbstractParquetFileReader.java
index 1be2f0d..c4eefff 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/parquet/AbstractParquetFileReader.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/parquet/AbstractParquetFileReader.java
@@ -92,7 +92,7 @@ public abstract class AbstractParquetFileReader<T> extends AbstractFileInputOper
    * Derived classes need to provide an implementation to convert a Parquet
    * Group to any other type. Each Parquet record is read as a <b>Group</b>
    * (parquet.example.data.Group) and is passed onto this method.
-   * 
+   *
    * @param group
    *          Parquet record represented as a Group
    * @return object of type T
@@ -101,7 +101,7 @@ public abstract class AbstractParquetFileReader<T> extends AbstractFileInputOper
 
   /**
    * Get Parquet Schema as a String
-   * 
+   *
    * @return parquetSchema Parquet Schema as a string.
    */
   public String getParquetSchema()
@@ -111,7 +111,7 @@ public abstract class AbstractParquetFileReader<T> extends AbstractFileInputOper
 
   /**
    * Set Parquet Schema as a String
-   * 
+   *
    * @param parquetSchema
    *          Parquet Schema as a string
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/parquet/ParquetFilePOJOReader.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/parquet/ParquetFilePOJOReader.java b/contrib/src/main/java/com/datatorrent/contrib/parquet/ParquetFilePOJOReader.java
index 8834c18..37bd60b 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/parquet/ParquetFilePOJOReader.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/parquet/ParquetFilePOJOReader.java
@@ -98,7 +98,7 @@ public class ParquetFilePOJOReader extends AbstractParquetFileReader<Object>
    * Converts a Parquet <b>Group</b>(parquet.example.data.Group) to a POJO.
    * Supported parquet primitive types are BOOLEAN, INT32, INT64, FLOAT, DOUBLE
    * and BINARY
-   * 
+   *
    * @throws ParquetEncodingException
    *           if group contains unsupported type
    */
@@ -167,7 +167,7 @@ public class ParquetFilePOJOReader extends AbstractParquetFileReader<Object>
   /**
    * Initializes {@link #activeFieldInfos} by adding fields represented by
    * fieldMapping
-   * 
+   *
    * @param fieldMapping
    *          String representing Parquet field name TO POJO field field name
    *          mapping
@@ -213,7 +213,7 @@ public class ParquetFilePOJOReader extends AbstractParquetFileReader<Object>
 
   /**
    * Returns String containing Parquet field name to POJO field name mapping
-   * 
+   *
    * @return parquetToPOJOFieldsMapping String representing Parquet field name
    *         TO POJO field name mapping
    */
@@ -230,7 +230,7 @@ public class ParquetFilePOJOReader extends AbstractParquetFileReader<Object>
    * long_id_v2:
    * LONG,css_file_loaded:css_file_loaded_v2:BOOLEAN,float_val:float_val_v2:
    * FLOAT,double_val:double_val_v2:DOUBLE
-   * 
+   *
    * @param parquetToPOJOFieldsMapping
    *          String representing Parquet field name TO POJO field name mapping
    */
@@ -261,7 +261,7 @@ public class ParquetFilePOJOReader extends AbstractParquetFileReader<Object>
   /**
    * Use reflection to generate field info values if the user has not provided
    * the inputs mapping.
-   * 
+   *
    * @return String representing the Parquet field name to POJO field name
    *         mapping
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/parser/CellProcessorBuilder.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/parser/CellProcessorBuilder.java b/contrib/src/main/java/com/datatorrent/contrib/parser/CellProcessorBuilder.java
index bf8f85d..e7840aa 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/parser/CellProcessorBuilder.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/parser/CellProcessorBuilder.java
@@ -50,7 +50,7 @@ import com.datatorrent.contrib.parser.DelimitedSchema.FieldType;
  * purpose and can be chained together with other processors to fully automate
  * all of the required conversions and constraint validation for a single
  * delimited record.
- * 
+ *
  *
  * @since 3.4.0
  */
@@ -59,7 +59,7 @@ public class CellProcessorBuilder
 
   /**
    * Method to get cell processors for given field type and constraints
-   * 
+   *
    * @param fieldType
    *          data type of the field
    * @param constraints
@@ -93,7 +93,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for String with constraints. These constraints
    * are evaluated against the String field for which this cellprocessor is
    * defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to String
    * @return CellProcessor
@@ -135,7 +135,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for Integer with constraints. These constraints
    * are evaluated against the Integer field for which this cellprocessor is
    * defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to Integer
    * @return CellProcessor
@@ -170,7 +170,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for Long with constraints. These constraints
    * are evaluated against the Long field for which this cellprocessor is
    * defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to Long
    * @return CellProcessor
@@ -204,7 +204,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for Float/Double with constraints. These
    * constraints are evaluated against the Float/Double field for which this
    * cellprocessor is defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to Float/Double
    * @return CellProcessor
@@ -238,7 +238,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for Boolean with constraints. These constraints
    * are evaluated against the Boolean field for which this cellprocessor is
    * defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to Boolean
    * @return CellProcessor
@@ -267,7 +267,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for Date with constraints. These constraints
    * are evaluated against the Date field for which this cellprocessor is
    * defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to Date
    * @return CellProcessor
@@ -291,7 +291,7 @@ public class CellProcessorBuilder
    * Method to get cellprocessor for Char with constraints. These constraints
    * are evaluated against the Char field for which this cellprocessor is
    * defined.
-   * 
+   *
    * @param constraints
    *          map of constraints applicable to Char
    * @return CellProcessor
@@ -316,7 +316,7 @@ public class CellProcessorBuilder
 
   /**
    * Get a Double Min Max cellprocessor.
-   * 
+   *
    * @param minValue
    *          minimum value.
    * @param maxValue
@@ -332,7 +332,7 @@ public class CellProcessorBuilder
 
   /**
    * Get a Long Min Max cellprocessor.
-   * 
+   *
    * @param minValue
    *          minimum value.
    * @param maxValue
@@ -348,7 +348,7 @@ public class CellProcessorBuilder
 
   /**
    * Get a Int Min Max cellprocessor.
-   * 
+   *
    * @param minValue
    *          minimum value.
    * @param maxValue
@@ -364,7 +364,7 @@ public class CellProcessorBuilder
 
   /**
    * Get Optional cellprocessor which means field is not mandatory.
-   * 
+   *
    * @param cellProcessor
    *          next processor in the chain.
    * @return CellProcessor
@@ -379,7 +379,7 @@ public class CellProcessorBuilder
 
   /**
    * Get cellprocessor to parse String as Integer.
-   * 
+   *
    * @param cellProcessor
    *          next processor in the chain.
    * @return CellProcessor
@@ -394,7 +394,7 @@ public class CellProcessorBuilder
 
   /**
    * Get cellprocessor to parse String as Long.
-   * 
+   *
    * @param cellProcessor
    *          next processor in the chain.
    * @return CellProcessor
@@ -409,7 +409,7 @@ public class CellProcessorBuilder
 
   /**
    * Get cellprocessor to parse String as Double.
-   * 
+   *
    * @param cellProcessor
    *          next processor in the chain.
    * @return CellProcessor
@@ -424,7 +424,7 @@ public class CellProcessorBuilder
 
   /**
    * Get cellprocessor to parse String as Character.
-   * 
+   *
    * @param cellProcessor
    *          next processor in the chain.
    * @return CellProcessor

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/parser/CsvParser.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/parser/CsvParser.java b/contrib/src/main/java/com/datatorrent/contrib/parser/CsvParser.java
index 4698821..ea406e9 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/parser/CsvParser.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/parser/CsvParser.java
@@ -65,7 +65,7 @@ import com.datatorrent.netlet.util.DTThrowable;
  * <b>err</b>:tuples that do not confine to schema are emitted on this port as
  * KeyValPair<String,String><br>
  * Key being the tuple and Val being the reason.
- * 
+ *
  * @displayName CsvParser
  * @category Parsers
  * @tags csv pojo parser
@@ -229,7 +229,7 @@ public class CsvParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get the schema
-   * 
+   *
    * @return
    */
   public String getSchema()
@@ -239,7 +239,7 @@ public class CsvParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Set the schema
-   * 
+   *
    * @param schema
    */
   public void setSchema(String schema)
@@ -249,7 +249,7 @@ public class CsvParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get errorTupleCount
-   * 
+   *
    * @return errorTupleCount
    */
   @VisibleForTesting
@@ -260,7 +260,7 @@ public class CsvParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get emittedObjectCount
-   * 
+   *
    * @return emittedObjectCount
    */
   @VisibleForTesting
@@ -271,7 +271,7 @@ public class CsvParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get incomingTuplesCount
-   * 
+   *
    * @return incomingTuplesCount
    */
   @VisibleForTesting

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java b/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java
index eb86c15..29b2c92 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java
@@ -175,7 +175,7 @@ public class DelimitedSchema
 
   /**
    * For a given json string, this method sets the field members
-   * 
+   *
    * @param json
    * @throws JSONException
    * @throws IOException
@@ -208,7 +208,7 @@ public class DelimitedSchema
 
   /**
    * Get the list of field names mentioned in schema
-   * 
+   *
    * @return fieldNames
    */
   public List<String> getFieldNames()
@@ -218,7 +218,7 @@ public class DelimitedSchema
 
   /**
    * Get the delimiter character
-   * 
+   *
    * @return delimiterChar
    */
   public int getDelimiterChar()
@@ -228,7 +228,7 @@ public class DelimitedSchema
 
   /**
    * Get the quoteChar
-   * 
+   *
    * @return quoteChar
    */
   public char getQuoteChar()
@@ -238,7 +238,7 @@ public class DelimitedSchema
 
   /**
    * Get the line delimiter
-   * 
+   *
    * @return lineDelimiter
    */
   public String getLineDelimiter()
@@ -255,7 +255,7 @@ public class DelimitedSchema
 
   /**
    * Get the list of Fields.
-   * 
+   *
    * @return fields
    */
   public List<Field> getFields()
@@ -266,7 +266,7 @@ public class DelimitedSchema
   /**
    * Objects of this class represents a particular field in the schema. Each
    * field has a name, type and a set of associated constraints.
-   * 
+   *
    */
   public class Field
   {
@@ -291,7 +291,7 @@ public class DelimitedSchema
 
     /**
      * Get the name of the field
-     * 
+     *
      * @return name
      */
     public String getName()
@@ -301,7 +301,7 @@ public class DelimitedSchema
 
     /**
      * Set the name of the field
-     * 
+     *
      * @param name
      */
     public void setName(String name)
@@ -311,7 +311,7 @@ public class DelimitedSchema
 
     /**
      * Get {@link FieldType}
-     * 
+     *
      * @return type
      */
     public FieldType getType()
@@ -321,7 +321,7 @@ public class DelimitedSchema
 
     /**
      * Set {@link FieldType}
-     * 
+     *
      * @param type
      */
     public void setType(FieldType type)
@@ -331,7 +331,7 @@ public class DelimitedSchema
 
     /**
      * Get the map of constraints associated with the field
-     * 
+     *
      * @return constraints
      */
     public Map<String, Object> getConstraints()
@@ -341,7 +341,7 @@ public class DelimitedSchema
 
     /**
      * Sets the map of constraints associated with the field
-     * 
+     *
      * @param constraints
      */
     public void setConstraints(Map<String, Object> constraints)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/parser/JsonParser.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/parser/JsonParser.java b/contrib/src/main/java/com/datatorrent/contrib/parser/JsonParser.java
index b6c3c4d..bb95f9c 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/parser/JsonParser.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/parser/JsonParser.java
@@ -66,8 +66,8 @@ import com.datatorrent.netlet.util.DTThrowable;
  * <b>err</b>:tuples that do not confine to schema are emitted on this port as
  * KeyValPair<String,String><br>
  * Key being the tuple and Val being the reason.
- * 
- * 
+ *
+ *
  * @displayName JsonParser
  * @category Parsers
  * @tags json pojo parser
@@ -180,7 +180,7 @@ public class JsonParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get jsonSchema contents as a string to be used during validation
-   * 
+   *
    * @return jsonSchema
    */
   public String getJsonSchema()
@@ -190,7 +190,7 @@ public class JsonParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Sets jsonSchema to be used during validation
-   * 
+   *
    * @param jsonSchema
    *          schema as a string
    */
@@ -201,7 +201,7 @@ public class JsonParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get errorTupleCount
-   * 
+   *
    * @return errorTupleCount
    */
   @VisibleForTesting
@@ -212,7 +212,7 @@ public class JsonParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get emittedObjectCount
-   * 
+   *
    * @return emittedObjectCount
    */
   @VisibleForTesting
@@ -223,7 +223,7 @@ public class JsonParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Get incomingTuplesCount
-   * 
+   *
    * @return incomingTuplesCount
    */
   @VisibleForTesting
@@ -234,7 +234,7 @@ public class JsonParser extends Parser<byte[], KeyValPair<String, String>>
 
   /**
    * Set schema.
-   * 
+   *
    * @param schema
    */
   @VisibleForTesting

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/r/REngineConnectable.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/r/REngineConnectable.java b/contrib/src/main/java/com/datatorrent/contrib/r/REngineConnectable.java
index a1a23b7..3de0055 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/r/REngineConnectable.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/r/REngineConnectable.java
@@ -30,7 +30,7 @@ import com.datatorrent.lib.db.Connectable;
 import com.datatorrent.netlet.util.DTThrowable;
 
 /**
- * @since 2.1.0 
+ * @since 2.1.0
  */
 public class REngineConnectable implements Connectable
 {
@@ -57,7 +57,7 @@ public class REngineConnectable implements Connectable
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.lib.db.Connectable#connect()
    */
   @Override
@@ -82,7 +82,7 @@ public class REngineConnectable implements Connectable
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.lib.db.Connectable#disconnect()
    */
   @Override
@@ -95,7 +95,7 @@ public class REngineConnectable implements Connectable
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see com.datatorrent.lib.db.Connectable#isConnected()
    */
   @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/r/RScript.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/r/RScript.java b/contrib/src/main/java/com/datatorrent/contrib/r/RScript.java
index b562641..e171336 100755
--- a/contrib/src/main/java/com/datatorrent/contrib/r/RScript.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/r/RScript.java
@@ -48,16 +48,16 @@ import com.datatorrent.netlet.util.DTThrowable;
  * 5. set the type of arguments being passed. This will be done in a Map <br>
  * 6. Send the data in the form of a tuple consisting of a key:value pair where, "key" represents the name of the
  *    argument "value" represents the actual value of the argument. A map of all the arguments is created and passed as
- *    input. <br> <br> 
- *    
+ *    input. <br> <br>
+ *
  *    The result will be returned on one of the output ports depending on the type of the return value.
  * <br> <br>
- * 
+ *
  * <b> Sample Usage Code : </b> oper is an object of type RScript. Create it by passing  <br> < name of the R script with
  * path from classpath>, < name of the function to be invoked>, < name of the return variable>);
  * <br> <br>
  * Map<String, RScript.REXP_TYPE> argTypeMap = new HashMap<String, RScript.REXP_TYPE>();  <br>
- * argTypeMap.put(< argument name>, RScript.< argument type in the form of REXP_TYPE>); <br> 
+ * argTypeMap.put(< argument name>, RScript.< argument type in the form of REXP_TYPE>); <br>
  * argTypeMap.put(< argument name>, RScript.< argument type in the form of REXP_TYPE>);  <br>
  * ...... <br>
  *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQInputOperator.java
index 847602e..08157bc 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQInputOperator.java
@@ -101,7 +101,7 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
   protected transient Channel channel;
   protected transient TracingConsumer tracingConsumer;
   protected transient String cTag;
-  
+
   protected transient ArrayBlockingQueue<KeyValPair<Long,byte[]>> holdingBuffer;
   private WindowDataManager windowDataManager;
   protected final transient Map<Long, byte[]> currentWindowRecoveryState;
@@ -109,7 +109,7 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
   private transient final Set<Long> recoveredTags;
   private transient long currentWindowId;
   private transient int operatorContextId;
-  
+
   public AbstractRabbitMQInputOperator()
   {
     currentWindowRecoveryState = new HashMap<Long, byte[]>();
@@ -118,7 +118,7 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
     windowDataManager = new WindowDataManager.NoopWindowDataManager();
   }
 
-  
+
 /**
  * define a consumer which can asynchronously receive data,
  * and added to holdingBuffer
@@ -162,7 +162,7 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
         }
         return;
       }
-      
+
       // Acknowledgements are sent at the end of the window after adding to idempotency manager
       pendingAck.add(tag);
       holdingBuffer.add(new KeyValPair<Long, byte[]>(tag, body));
@@ -196,7 +196,7 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
   }
 
   @SuppressWarnings("unchecked")
-  private void replay(long windowId) {      
+  private void replay(long windowId) {
     Map<Long, byte[]> recoveredData;
     try {
       recoveredData = (Map<Long, byte[]>)this.windowDataManager.retrieve(windowId);
@@ -212,7 +212,7 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
     }
   }
 
-  
+
   @Override
   public void endWindow()
   {
@@ -221,25 +221,25 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
     KeyValPair<Long, byte[]> message;
     while ((message = holdingBuffer.poll()) != null) {
       currentWindowRecoveryState.put(message.getKey(), message.getValue());
-      emitTuple(message.getValue());      
+      emitTuple(message.getValue());
     }
-    
+
     try {
       this.windowDataManager.save(currentWindowRecoveryState, currentWindowId);
     } catch (IOException e) {
       DTThrowable.rethrow(e);
     }
-    
+
     currentWindowRecoveryState.clear();
-    
+
     for (Long deliveryTag : pendingAck) {
       try {
         channel.basicAck(deliveryTag, false);
-      } catch (IOException e) {        
+      } catch (IOException e) {
         DTThrowable.rethrow(e);
       }
     }
-    
+
     pendingAck.clear();
   }
 
@@ -391,15 +391,15 @@ public abstract class AbstractRabbitMQInputOperator<T> implements
   {
     this.routingKey = routingKey;
   }
-  
+
   public WindowDataManager getWindowDataManager() {
     return windowDataManager;
   }
-  
+
   public void setWindowDataManager(WindowDataManager windowDataManager) {
     this.windowDataManager = windowDataManager;
   }
-  
+
 
 
 }


[5/6] apex-malhar git commit: Fix trailing whitespace.

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQOutputOperator.java
index a19417c..2bbb903 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/AbstractRabbitMQOutputOperator.java
@@ -74,7 +74,7 @@ public class AbstractRabbitMQOutputOperator extends BaseOperator
   transient Channel channel = null;
   transient String exchange = "testEx";
   transient String queueName="testQ";
-  
+
   private WindowDataManager windowDataManager;
   private transient long currentWindowId;
   private transient long largestRecoveryWindowId;
@@ -86,7 +86,7 @@ public class AbstractRabbitMQOutputOperator extends BaseOperator
   @Override
   public void setup(OperatorContext context)
   {
-    // Needed to setup idempotency storage manager in setter 
+    // Needed to setup idempotency storage manager in setter
     this.context = context;
     this.operatorContextId = context.getId();
 
@@ -104,11 +104,11 @@ public class AbstractRabbitMQOutputOperator extends BaseOperator
       DTThrowable.rethrow(ex);
     }
   }
-  
+
   @Override
   public void beginWindow(long windowId)
   {
-    currentWindowId = windowId;    
+    currentWindowId = windowId;
     largestRecoveryWindowId = windowDataManager.getLargestCompletedWindow();
     if (windowId <= largestRecoveryWindowId) {
       // Do not resend already sent tuples
@@ -119,7 +119,7 @@ public class AbstractRabbitMQOutputOperator extends BaseOperator
       skipProcessingTuple = false;
     }
   }
-  
+
   /**
    * {@inheritDoc}
    */
@@ -158,11 +158,11 @@ public class AbstractRabbitMQOutputOperator extends BaseOperator
       logger.debug(ex.toString());
     }
   }
-  
+
   public WindowDataManager getWindowDataManager() {
     return windowDataManager;
   }
-  
+
   public void setWindowDataManager(WindowDataManager windowDataManager) {
     this.windowDataManager = windowDataManager;
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperator.java
index 74ae181..1ddd9d4 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperator.java
@@ -38,7 +38,7 @@ import com.datatorrent.netlet.util.DTThrowable;
 public class RabbitMQOutputOperator extends AbstractSinglePortRabbitMQOutputOperator<byte[]>
 {
   private static final Logger logger = LoggerFactory.getLogger(RabbitMQOutputOperator.class);
-  
+
   @Override
   public void processTuple(byte[] tuple)
   {
@@ -46,6 +46,6 @@ public class RabbitMQOutputOperator extends AbstractSinglePortRabbitMQOutputOper
       channel.basicPublish(exchange, "", null, tuple);
     } catch (IOException e) {
       DTThrowable.rethrow(e);
-    }   
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/redis/AbstractRedisInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/redis/AbstractRedisInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/redis/AbstractRedisInputOperator.java
index 59b320d..0b12574 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/redis/AbstractRedisInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/redis/AbstractRedisInputOperator.java
@@ -38,7 +38,7 @@ import com.datatorrent.lib.db.AbstractKeyValueStoreInputOperator;
 
 /**
  * This is the base implementation of a Redis input operator.
- * 
+ *
  * @displayName Abstract Redis Input
  * @category Input
  * @tags redis, key value
@@ -161,7 +161,7 @@ public abstract class AbstractRedisInputOperator<T> extends AbstractKeyValueStor
     scanComplete = false;
     scanParameters = new ScanParams();
     scanParameters.count(scanCount);
-    
+
     // For the 1st window after checkpoint, windowID - 1 would not have recovery
     // offset stored in windowDataManager
     // But recoveryOffset is non-transient, so will be recovered with

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/redis/RedisKeyValueInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/redis/RedisKeyValueInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/redis/RedisKeyValueInputOperator.java
index de9ee45..ae8ef5c 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/redis/RedisKeyValueInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/redis/RedisKeyValueInputOperator.java
@@ -28,7 +28,7 @@ import com.datatorrent.lib.util.KeyValPair;
  * This is the an implementation of a Redis input operator for fetching
  * Key-Value pair stored in Redis. It takes in keys to fetch and emits
  * corresponding <Key, Value> Pair. Value data type is String in this case.
- * 
+ *
  * @displayName Redis Input Operator for Key Value pair
  * @category Store
  * @tags input operator, key value

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/redis/RedisMapAsValueInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/redis/RedisMapAsValueInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/redis/RedisMapAsValueInputOperator.java
index a9913f9..156252b 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/redis/RedisMapAsValueInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/redis/RedisMapAsValueInputOperator.java
@@ -25,8 +25,8 @@ import com.datatorrent.lib.util.KeyValPair;
 /**
  * This is the an implementation of a Redis input operator It takes in keys to
  * fetch and emits Values stored as Maps in Redis i.e. when value datatype in
- * Redis is HashMap 
- * 
+ * Redis is HashMap
+ *
  * @displayName Redis Input Operator for Map
  * @category Store
  * @tags input operator, key value

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/redis/RedisStore.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/redis/RedisStore.java b/contrib/src/main/java/com/datatorrent/contrib/redis/RedisStore.java
index 27b4fd8..b540779 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/redis/RedisStore.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/redis/RedisStore.java
@@ -192,7 +192,7 @@ public class RedisStore implements TransactionableKeyValueStore
   }
 
   /**
-   * Gets the stored Map for given the key, when the value data type is a map, stored with hmset  
+   * Gets the stored Map for given the key, when the value data type is a map, stored with hmset
    *
    * @param key
    * @return hashmap stored for the key.
@@ -329,8 +329,8 @@ public class RedisStore implements TransactionableKeyValueStore
       }
     }
   }
-  
-  
+
+
 
   /**
    * @return the timeOut

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/solr/AbstractSolrOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/solr/AbstractSolrOutputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/solr/AbstractSolrOutputOperator.java
index 705e09c..805238c 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/solr/AbstractSolrOutputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/solr/AbstractSolrOutputOperator.java
@@ -84,7 +84,7 @@ public abstract class AbstractSolrOutputOperator<T, S extends Connectable> exten
 
   /**
    * Converts the object into Solr document format
-   * 
+   *
    * @param object to be stored to Solr Server
    * @return
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/solr/ConcurrentUpdateSolrServerConnector.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/solr/ConcurrentUpdateSolrServerConnector.java b/contrib/src/main/java/com/datatorrent/contrib/solr/ConcurrentUpdateSolrServerConnector.java
index 3e86949..d9237c2 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/solr/ConcurrentUpdateSolrServerConnector.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/solr/ConcurrentUpdateSolrServerConnector.java
@@ -110,7 +110,7 @@ public class ConcurrentUpdateSolrServerConnector extends SolrServerConnector
   }
 
   /*
-   * HttpClient instance 
+   * HttpClient instance
    * Gets the HTTP Client instance
    */
   public HttpClient getHttpClient()

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/splunk/SplunkStore.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/splunk/SplunkStore.java b/contrib/src/main/java/com/datatorrent/contrib/splunk/SplunkStore.java
index ede1f3a..3abdb1b 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/splunk/SplunkStore.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/splunk/SplunkStore.java
@@ -18,7 +18,7 @@
  */
 package com.datatorrent.contrib.splunk;
 
-import com.splunk.*; 
+import com.splunk.*;
 
 import javax.validation.constraints.NotNull;
 import org.slf4j.Logger;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/com/datatorrent/contrib/zmq/ZeroMQInputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/com/datatorrent/contrib/zmq/ZeroMQInputOperator.java b/contrib/src/main/java/com/datatorrent/contrib/zmq/ZeroMQInputOperator.java
index 15aaa0b..05d5e52 100644
--- a/contrib/src/main/java/com/datatorrent/contrib/zmq/ZeroMQInputOperator.java
+++ b/contrib/src/main/java/com/datatorrent/contrib/zmq/ZeroMQInputOperator.java
@@ -31,7 +31,7 @@ package com.datatorrent.contrib.zmq;
 public class ZeroMQInputOperator extends AbstractSinglePortZeroMQInputOperator<byte[]>
 {
   @Override
-  public byte[] getTuple(byte[] message) {  	
+  public byte[] getTuple(byte[] message) {
     return message;
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Change.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Change.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Change.java
index 146a65d..51d90ab 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Change.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Change.java
@@ -25,7 +25,7 @@ import com.datatorrent.lib.util.BaseNumberValueOperator;
 
 /**
  * Operator compares data values arriving on input port with base value input operator.
- * 
+ *
  * <p>
  * Arriving base value is stored in operator for comparison, old base value is overwritten.&nbsp;
  * This emits &lt;change in value,percentage change&gt;.
@@ -80,7 +80,7 @@ public class Change<V extends Number> extends BaseNumberValueOperator<V>
       }
     }
   };
-        
+
         /**
    * Input port that takes a number&nbsp; It stores the value for base comparison.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/CompareExceptMap.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/CompareExceptMap.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/CompareExceptMap.java
index 155cb23..bfa3c0a 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/CompareExceptMap.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/CompareExceptMap.java
@@ -31,7 +31,7 @@ import com.datatorrent.lib.util.UnifierHashMap;
  * Operator compares based on the property "key", "value", and "compare".
  * <p>
  * The comparison is done by getting double value from the Number.
- * Passed tuples are emitted on the output port "compare".&nbsp; 
+ * Passed tuples are emitted on the output port "compare".&nbsp;
  * Failed tuples are emitted on port "except".
  * Both output ports are optional, but at least one has to be connected.
  * This module is a pass through<br>
@@ -91,7 +91,7 @@ public class CompareExceptMap<K, V extends Number> extends MatchMap<K, V>
    */
   @OutputPortFieldAnnotation(optional = true)
   public final transient DefaultOutputPort<HashMap<K, V>> compare = match;
-  
+
   /**
    * Output port that emits a hashmap of non matching tuples after comparison.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/ExceptMap.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/ExceptMap.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/ExceptMap.java
index 3dcae74..2dcb583 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/ExceptMap.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/ExceptMap.java
@@ -66,7 +66,7 @@ import com.datatorrent.lib.util.UnifierHashMap;
 @Deprecated
 @Stateless
 public class ExceptMap<K, V extends Number> extends MatchMap<K, V>
-{       
+{
         /**
          * Output port that emits non matching number tuples.
          */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Quotient.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Quotient.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Quotient.java
index e1deb9d..8909acd 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Quotient.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/Quotient.java
@@ -24,7 +24,7 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.lib.util.BaseNumberValueOperator;
 
 /**
- * This operator adds all the values on "numerator" and "denominator" and emits quotient at end of window. 
+ * This operator adds all the values on "numerator" and "denominator" and emits quotient at end of window.
  * <p>
  * <br>
  * <b>StateFull : Yes </b>, Sum of values is taken over application window. <br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/QuotientMap.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/QuotientMap.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/QuotientMap.java
index 3581b81..b37bbd5 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/QuotientMap.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/QuotientMap.java
@@ -31,7 +31,7 @@ import com.datatorrent.api.annotation.OperatorAnnotation;
 import com.datatorrent.lib.util.BaseNumberKeyValueOperator;
 
 /**
- * Add all the values for each key on "numerator" and "denominator" and emits quotient at end of window for all keys in the denominator. 
+ * Add all the values for each key on "numerator" and "denominator" and emits quotient at end of window for all keys in the denominator.
  * <p>
  * <br>
  * Application can set multiplication value for quotient(default = 1). <br>

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/SumCountMap.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/SumCountMap.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/SumCountMap.java
index 048eff7..b2493a1 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/SumCountMap.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/misc/math/SumCountMap.java
@@ -178,7 +178,7 @@ public class SumCountMap<K, V extends Number> extends
       return ret;
     }
   };
-        
+
         /**
    * Key,short sum output port.
    */
@@ -194,7 +194,7 @@ public class SumCountMap<K, V extends Number> extends
       return ret;
     }
   };
-        
+
         /**
    * Key,float sum output port.
    */
@@ -210,7 +210,7 @@ public class SumCountMap<K, V extends Number> extends
       return ret;
     }
   };
-        
+
         /**
    * Key,integer sum output port.
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/main/java/org/apache/apex/malhar/contrib/parser/StreamingJsonParser.java
----------------------------------------------------------------------
diff --git a/contrib/src/main/java/org/apache/apex/malhar/contrib/parser/StreamingJsonParser.java b/contrib/src/main/java/org/apache/apex/malhar/contrib/parser/StreamingJsonParser.java
index 38a4804..1f8dc5c 100644
--- a/contrib/src/main/java/org/apache/apex/malhar/contrib/parser/StreamingJsonParser.java
+++ b/contrib/src/main/java/org/apache/apex/malhar/contrib/parser/StreamingJsonParser.java
@@ -61,7 +61,7 @@ import com.datatorrent.lib.util.PojoUtils;
  * <b>err</b>:tuples that could not be parsed are emitted on this port as
  * KeyValPair<String,String><br>
  * Key being the tuple and Val being the reason
- * 
+ *
  * @displayName SimpleStreamingJsonParser
  * @category Parsers
  * @tags json pojo parser streaming
@@ -187,7 +187,7 @@ public class StreamingJsonParser extends Parser<byte[], KeyValPair<String, Strin
 
   /**
    * Creates a map representing fieldName in POJO:field in JSON:Data type
-   * 
+   *
    * @return List of FieldInfo
    */
   private List<FieldInfo> createFieldInfoMap(String str)
@@ -255,7 +255,7 @@ public class StreamingJsonParser extends Parser<byte[], KeyValPair<String, Strin
   /**
    * Use reflection to generate field info values if the user has not provided
    * the inputs mapping
-   * 
+   *
    * @return String representing the POJO field to JSON field mapping
    */
   private String generateFieldInfoInputs(Class<?> cls)
@@ -331,7 +331,7 @@ public class StreamingJsonParser extends Parser<byte[], KeyValPair<String, Strin
   /**
    * Returns a POJO from a Generic Record Null is set as the default value if a
    * key is not found in the parsed JSON
-   * 
+   *
    * @return Object
    */
   @SuppressWarnings("unchecked")

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/couchbase/CouchBaseSetTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/couchbase/CouchBaseSetTest.java b/contrib/src/test/java/com/datatorrent/contrib/couchbase/CouchBaseSetTest.java
index 9c99ad2..f57279d 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/couchbase/CouchBaseSetTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/couchbase/CouchBaseSetTest.java
@@ -90,7 +90,7 @@ public class CouchBaseSetTest
       System.err.println("Error connecting to Couchbase: " + e.getMessage());
       System.exit(1);
     }
-    
+
     TestPojo obj = new TestPojo();
     obj.setName("test");
     obj.setPhone(123344555);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchOperatorTest.java
index 161fe90..671d7dc 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchOperatorTest.java
@@ -89,7 +89,7 @@ public class ElasticSearchOperatorTest
     ElasticSearchMapOutputOperator<Map<String, Object>> operator = new ElasticSearchMapOutputOperator<Map<String, Object>>() {
       /*
        * (non-Javadoc)
-       * 
+       *
        * @see com.datatorrent.contrib.elasticsearch. AbstractElasticSearchOutputOperator #processTuple(java.lang.Object)
        */
       @Override
@@ -128,7 +128,7 @@ public class ElasticSearchOperatorTest
 
   /**
    * Read data written to elastic search
-   * 
+   *
    * @param tupleIDs
    * @param testStartTime
    */
@@ -137,7 +137,7 @@ public class ElasticSearchOperatorTest
     ElasticSearchMapInputOperator<Map<String, Object>> operator = new ElasticSearchMapInputOperator<Map<String, Object>>() {
       /**
        * Set SearchRequestBuilder parameters specific to current window.
-       * 
+       *
        * @see com.datatorrent.contrib.elasticsearch.ElasticSearchMapInputOperator#getSearchRequestBuilder()
        */
       @Override

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolateTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolateTest.java b/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolateTest.java
index f707f1b..daf1602 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolateTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/elasticsearch/ElasticSearchPercolateTest.java
@@ -77,9 +77,9 @@ public class ElasticSearchPercolateTest
 
   /**
    * Register percolate queries on ElasticSearch
-   * 
+   *
    * @throws IOException
-   * 
+   *
    */
   private void registerPercolateQueries() throws IOException
   {
@@ -89,7 +89,7 @@ public class ElasticSearchPercolateTest
   }
 
   /**
-   * 
+   *
    */
   private void checkPercolateResponse()
   {
@@ -136,7 +136,7 @@ public class ElasticSearchPercolateTest
         matchIds.add(match.getId().toString());
       }
       Collections.sort(matchIds);
-      
+
       Assert.assertArrayEquals(matchIds.toArray(), matches[i]);
       i++;
     }
@@ -157,6 +157,6 @@ public class ElasticSearchPercolateTest
       //This indicates that elasticsearch is not running on a particular machine.
       //Silently ignore in this case.
     }
-    
+
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/geode/GeodeCheckpointStoreTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/geode/GeodeCheckpointStoreTest.java b/contrib/src/test/java/com/datatorrent/contrib/geode/GeodeCheckpointStoreTest.java
index 5c59622..5bde40c 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/geode/GeodeCheckpointStoreTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/geode/GeodeCheckpointStoreTest.java
@@ -59,7 +59,7 @@ public class GeodeCheckpointStoreTest
     store.setTableName(REGION_NAME);
     store.connect();
   }
-  
+
   @Test
   public void testSave() throws IOException
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOInputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOInputOperatorTest.java
index 4e6bb39..6a2f891 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOInputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOInputOperatorTest.java
@@ -54,7 +54,7 @@ public class HBasePOJOInputOperatorTest
     HBASEINPUT,
     OUTPUT
   };
-  
+
   public static class MyGenerator extends TupleGenerateCacheOperator<TestPOJO>
   {
     public MyGenerator()
@@ -84,7 +84,7 @@ public class HBasePOJOInputOperatorTest
   private HBaseStore store;
   private HBasePOJOPutOperator hbaseOutputOperator;
   private TestHBasePOJOInputOperator hbaseInputOperator;
-  
+
   @Before
   public void prepare() throws Exception
   {
@@ -93,13 +93,13 @@ public class HBasePOJOInputOperatorTest
     setupOperators();
     HBaseUtil.createTable( store.getConfiguration(), store.getTableName());
   }
-  
+
   @After
   public void cleanup() throws Exception
   {
     HBaseUtil.deleteTable( store.getConfiguration(), store.getTableName());
   }
-  
+
 
   @Test
   public void test() throws Exception
@@ -119,20 +119,20 @@ public class HBasePOJOInputOperatorTest
     // Create ActiveMQStringSinglePortOutputOperator
     MyGenerator generator = dag.addOperator( OPERATOR.GENERATOR.name(), MyGenerator.class);
     generator.setTupleNum( TUPLE_NUM );
-    
+
     hbaseOutputOperator = dag.addOperator( OPERATOR.HBASEOUTPUT.name(), hbaseOutputOperator );
 
     hbaseInputOperator = dag.addOperator(OPERATOR.HBASEINPUT.name(), hbaseInputOperator);
     dag.setOutputPortAttribute(hbaseInputOperator.outputPort, Context.PortContext.TUPLE_CLASS, TestPOJO.class);
-    
-    
+
+
     TupleCacheOutputOperator output = dag.addOperator(OPERATOR.OUTPUT.name(), TupleCacheOutputOperator.class);
-    
+
     // Connect ports
     dag.addStream("queue1", generator.outputPort, hbaseOutputOperator.input ).setLocality(DAG.Locality.NODE_LOCAL);
     dag.addStream("queue2", hbaseInputOperator.outputPort, output.inputPort ).setLocality(DAG.Locality.NODE_LOCAL);
-    
-    
+
+
     Configuration conf = new Configuration(false);
     lma.prepareDAG(app, conf);
 
@@ -158,10 +158,10 @@ public class HBasePOJOInputOperatorTest
         throw new RuntimeException("Testcase taking too long");
       }
     }
-    
+
     lc.shutdown();
 
-    
+
     validate( generator.getTuples(), output.getReceivedTuples() );
   }
 
@@ -173,11 +173,11 @@ public class HBasePOJOInputOperatorTest
     actual.removeAll(expected);
     Assert.assertTrue( "content not same.", actual.isEmpty() );
   }
-  
+
   protected void setupOperators()
   {
     TableInfo<HBaseFieldInfo> tableInfo = new TableInfo<HBaseFieldInfo>();
-    
+
     tableInfo.setRowOrIdExpression("row");
 
     List<HBaseFieldInfo> fieldsInfo = new ArrayList<HBaseFieldInfo>();
@@ -186,10 +186,10 @@ public class HBasePOJOInputOperatorTest
     fieldsInfo.add( new HBaseFieldInfo( "address", "address", SupportType.STRING, "f1") );
 
     tableInfo.setFieldsInfo(fieldsInfo);
-    
+
     hbaseInputOperator.setTableInfo(tableInfo);
     hbaseOutputOperator.setTableInfo(tableInfo);
-    
+
     store = new HBaseStore();
     store.setTableName("test");
     store.setZookeeperQuorum("localhost");
@@ -197,7 +197,7 @@ public class HBasePOJOInputOperatorTest
 
     hbaseInputOperator.setStore(store);
     hbaseOutputOperator.setStore(store);
-    
+
     OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(
         OPERATOR_ID, new AttributeMap.DefaultAttributeMap());
     hbaseInputOperator.setup(context);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOPutOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOPutOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOPutOperatorTest.java
index 51dbadc..8c81560 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOPutOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBasePOJOPutOperatorTest.java
@@ -52,11 +52,11 @@ public class HBasePOJOPutOperatorTest
   private static final Logger logger = LoggerFactory.getLogger(HBasePOJOPutOperatorTest.class);
   public static final int TEST_SIZE = 15000;
   public static final int WINDOW_SIZE = 1500;
-  
+
   private HBasePOJOPutOperator operator;
-  
+
   private final long startWindowId = Calendar.getInstance().getTimeInMillis();
-  
+
   public HBasePOJOPutOperatorTest()
   {
   }
@@ -69,13 +69,13 @@ public class HBasePOJOPutOperatorTest
 
     createOrDeleteTable(operator.getStore(), false );
   }
-  
+
   @After
   public void cleanup() throws Exception
   {
     createOrDeleteTable(operator.getStore(), true );
   }
-  
+
   /**
    * this test case only test if HBasePojoPutOperator can save data to the
    * HBase. it doesn't test connection to the other operators
@@ -107,7 +107,7 @@ public class HBasePOJOPutOperatorTest
 
       Thread.sleep(30000);
 
-      
+
     }
     catch (Exception e)
     {
@@ -115,7 +115,7 @@ public class HBasePOJOPutOperatorTest
       Assert.fail(e.getMessage());
     }
   }
-  
+
   protected void createOrDeleteTable(HBaseStore store, boolean isDelete ) throws Exception
   {
     HBaseAdmin admin = null;
@@ -123,7 +123,7 @@ public class HBasePOJOPutOperatorTest
     {
       admin = new HBaseAdmin(store.getConfiguration());
       final String tableName = store.getTableName();
-      
+
       if (!admin.isTableAvailable(tableName) && !isDelete )
       {
         HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
@@ -170,14 +170,14 @@ public class HBasePOJOPutOperatorTest
 
     OperatorContextTestHelper.TestIdOperatorContext context = new OperatorContextTestHelper.TestIdOperatorContext(
         OPERATOR_ID, attributeMap);
-    
+
     operator.setup(context);
   }
 
   protected void configure(HBasePOJOPutOperator operator)
   {
     TableInfo<HBaseFieldInfo> tableInfo = new TableInfo<HBaseFieldInfo>();
-    
+
     tableInfo.setRowOrIdExpression("row");
 
     List<HBaseFieldInfo> fieldsInfo = new ArrayList<HBaseFieldInfo>();
@@ -203,7 +203,7 @@ public class HBasePOJOPutOperatorTest
   {
     if( tupleGenerator == null )
       tupleGenerator = new TupleGenerator<TestPOJO>( TestPOJO.class );
-    
+
     return tupleGenerator.getNextTuple();
   }
 
@@ -225,21 +225,21 @@ public class HBasePOJOPutOperatorTest
       HTable table = operator.getStore().getTable();
       Scan scan = new Scan();
       ResultScanner resultScanner = table.getScanner(scan);
-      
+
       int recordCount = 0;
       while( true )
       {
         Result result = resultScanner.next();
         if( result == null )
           break;
-        
+
         int rowId = Integer.valueOf( Bytes.toString( result.getRow() ) );
         Assert.assertTrue( "rowId="+rowId+" aut of range" , ( rowId > 0 && rowId <= TEST_SIZE ) );
         Assert.assertTrue( "the rowId="+rowId+" already processed.", rowIds[rowId-1] == 1 );
         rowIds[rowId-1]=0;
-        
+
         List<Cell> cells = result.listCells();
-        
+
         Map<String, byte[]> map = new HashMap<String,byte[]>();
         for( Cell cell : cells )
         {
@@ -250,17 +250,17 @@ public class HBasePOJOPutOperatorTest
         TestPOJO read = TestPOJO.from(map);
         read.setRowId((long)rowId);
         TestPOJO expected = new TestPOJO( rowId );
-        
+
         Assert.assertTrue( String.format( "expected %s, get %s ", expected.toString(), read.toString() ), expected.completeEquals(read) );
         recordCount++;
       }
-      
+
       int missedCount = 0;
       if( recordCount != TEST_SIZE )
       {
         logger.error( "unsaved records: " );
         StringBuilder sb = new StringBuilder();
-        
+
         for( int i=0; i<TEST_SIZE; ++i )
         {
           if( rowIds[i] != 0 )

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseTransactionalPutOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseTransactionalPutOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseTransactionalPutOperatorTest.java
index eef69d4..3cdc1bf 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseTransactionalPutOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseTransactionalPutOperatorTest.java
@@ -66,7 +66,7 @@ public class HBaseTransactionalPutOperatorTest {
         }
 
         @Override
-        public AttributeMap getAttributes() {  
+        public AttributeMap getAttributes() {
           return null;
         }
 
@@ -136,7 +136,7 @@ public class HBaseTransactionalPutOperatorTest {
         }
 
         @Override
-        public AttributeMap getAttributes() {  
+        public AttributeMap getAttributes() {
           return null;
         }
 
@@ -210,7 +210,7 @@ public class HBaseTransactionalPutOperatorTest {
         }
 
         @Override
-        public AttributeMap getAttributes() {  
+        public AttributeMap getAttributes() {
           return null;
         }
 
@@ -238,8 +238,8 @@ public class HBaseTransactionalPutOperatorTest {
         }
       });
 
-      
-      
+
+
       thop.input.process(t2);
       thop.endWindow();
       HBaseTuple tuple,tuple2;
@@ -257,7 +257,7 @@ public class HBaseTransactionalPutOperatorTest {
       logger.error(e.getMessage());
     }
   }
-  
+
   public static class TestHBasePutOperator extends
   AbstractHBaseWindowPutOutputOperator<HBaseTuple> {
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseUtil.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseUtil.java b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseUtil.java
index 9c237d7..5b54f3e 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseUtil.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/hbase/HBaseUtil.java
@@ -29,13 +29,13 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
 
 public class HBaseUtil
 {
-  public static void createTable(Configuration configuration, String tableName ) throws MasterNotRunningException, ZooKeeperConnectionException, IOException 
+  public static void createTable(Configuration configuration, String tableName ) throws MasterNotRunningException, ZooKeeperConnectionException, IOException
   {
     HBaseAdmin admin = null;
     try
     {
       admin = new HBaseAdmin( configuration );
-      
+
       if (!admin.isTableAvailable(tableName) )
       {
         HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
@@ -54,14 +54,14 @@ public class HBaseUtil
       }
     }
   }
-  
+
   public static void deleteTable( Configuration configuration, String tableName ) throws MasterNotRunningException, ZooKeeperConnectionException, IOException
   {
     HBaseAdmin admin = null;
     try
     {
       admin = new HBaseAdmin( configuration );
-      
+
       if ( admin.isTableAvailable(tableName) )
       {
         admin.disableTable(tableName);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/helper/MessageQueueTestHelper.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/helper/MessageQueueTestHelper.java b/contrib/src/test/java/com/datatorrent/contrib/helper/MessageQueueTestHelper.java
index e2eec7b..5465c28 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/helper/MessageQueueTestHelper.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/helper/MessageQueueTestHelper.java
@@ -47,7 +47,7 @@ public class MessageQueueTestHelper {
     }
   }
 
-  public static ArrayList<HashMap<String, Integer>> getMessages()  
+  public static ArrayList<HashMap<String, Integer>> getMessages()
   {
     ArrayList<HashMap<String, Integer>> mapList = new ArrayList<HashMap<String, Integer>>();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaExactlyOnceOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaExactlyOnceOutputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaExactlyOnceOutputOperatorTest.java
index e20a9fe..5f32fb0 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaExactlyOnceOutputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaExactlyOnceOutputOperatorTest.java
@@ -45,7 +45,7 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
   private static final int maxTuple = 40;
   private static CountDownLatch latch;
   private static boolean isRestarted = false;
-  
+
    /**
    * Tuple generator for testing.
    */
@@ -68,7 +68,7 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
     @Override
     public void setup(OperatorContext context)
     {
-      
+
     }
 
     @Override
@@ -110,7 +110,7 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
       for (int i = stringBuffer.size(); i-- > 0;) {
         if (i == 20 && isRestarted == false) {
           isRestarted = true;
-          // fail the operator and when it gets back resend everything 
+          // fail the operator and when it gets back resend everything
           throw new RuntimeException();
         }
         outputPort.emit(stringBuffer.poll());
@@ -144,7 +144,7 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
 
     StringGeneratorInputOperator generator = dag.addOperator("TestStringGenerator", StringGeneratorInputOperator.class);
     final SimpleKafkaExactOnceOutputOperator node = dag.addOperator("Kafka message producer", SimpleKafkaExactOnceOutputOperator.class);
-    
+
     Properties props = new Properties();
     props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
     props.put("metadata.broker.list", "localhost:9092");
@@ -152,7 +152,7 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
     props.setProperty("queue.buffering.max.ms", "200");
     props.setProperty("queue.buffering.max.messages", "10");
     props.setProperty("batch.num.messages", "5");
-    
+
     node.setConfigProperties(props);
     // Set configuration parameters for Kafka
     node.setTopic("topic1");
@@ -160,14 +160,14 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
     // Connect ports
     dag.addStream("Kafka message", generator.outputPort, node.inputPort).setLocality(Locality.CONTAINER_LOCAL);
 
-    
+
     // Create local cluster
     final LocalMode.Controller lc = lma.getController();
     lc.runAsync();
 
     Future f = Executors.newFixedThreadPool(1).submit(listener);
     f.get(30, TimeUnit.SECONDS);
-    
+
     lc.shutdown();
 
     // Check values send vs received
@@ -176,9 +176,9 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
     Assert.assertEquals("First tuple", "testString 1", listener.getMessage(listener.holdingBuffer.peek()));
 
     listener.close();
-    
+
   }
-  
+
   public static class SimpleKafkaExactOnceOutputOperator extends AbstractExactlyOnceKafkaOutputOperator<String, String, String>{
 
     @Override
@@ -192,7 +192,7 @@ public class KafkaExactlyOnceOutputOperatorTest extends KafkaOperatorTestBase
     {
       return new Pair<String, String>(tuple.split("###")[0], tuple.split("###")[1]);
     }
-    
+
   }
 
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestPartitioner.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestPartitioner.java b/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestPartitioner.java
index 0e3e4e5..e409353 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestPartitioner.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestPartitioner.java
@@ -31,7 +31,7 @@ import kafka.utils.VerifiableProperties;
 public class KafkaTestPartitioner implements Partitioner
 {
   public KafkaTestPartitioner (VerifiableProperties props) {
-    
+
   }
   @Override
   public int partition(Object key, int num_Partitions)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestProducer.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestProducer.java b/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestProducer.java
index 0a72a2e..cbd946a 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestProducer.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kafka/KafkaTestProducer.java
@@ -40,7 +40,7 @@ public class KafkaTestProducer implements Runnable
   private boolean hasPartition = false;
   private boolean hasMultiCluster = false;
   private List<String> messages;
-  
+
   private String producerType = "async";
 
   public int getSendCount()
@@ -95,7 +95,7 @@ public class KafkaTestProducer implements Runnable
       producer1 = null;
     }
   }
-  
+
   public KafkaTestProducer(String topic, boolean hasPartition) {
     this(topic, hasPartition, false);
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOperatorTestBase.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOperatorTestBase.java b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOperatorTestBase.java
index f3f8478..c9948ba 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOperatorTestBase.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOperatorTestBase.java
@@ -44,7 +44,7 @@ public class KinesisOperatorTestBase
   protected transient AWSCredentialsProvider credentials = null;
 
   private static final Logger logger = LoggerFactory.getLogger(KinesisOperatorTestBase.class);
-  
+
   private void createClient()
   {
     credentials = new DefaultAWSCredentialsProviderChain();
@@ -56,27 +56,27 @@ public class KinesisOperatorTestBase
   {
     CreateStreamRequest streamRequest = null;
     createClient();
-    
+
     for( int i=0; i<100; ++i )
     {
-      try 
+      try
       {
         streamName = streamNamePrefix + i;
         streamRequest = new CreateStreamRequest();
         streamRequest.setStreamName(streamName);
         streamRequest.setShardCount(shardCount);
         client.createStream(streamRequest);
-  
+
         logger.info( "created stream {}.", streamName );
         Thread.sleep(30000);
-        
+
         break;
       }
       catch( ResourceInUseException riue )
       {
         logger.warn( "Resource is in use.", riue.getMessage() );
       }
-      catch (Exception e) 
+      catch (Exception e)
       {
         logger.error( "Got exception.", e );
         throw new RuntimeException(e);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOutputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOutputOperatorTest.java
index 368a191..b478b9f 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOutputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisOutputOperatorTest.java
@@ -52,7 +52,7 @@ public abstract class KinesisOutputOperatorTest< O extends AbstractKinesisOutput
     super.beforeTest();
   }
 
-  
+
   /**
    * Test AbstractKinesisOutputOperator (i.e. an output adapter for Kinesis, aka producer).
    * This module sends data into an ActiveMQ message bus.
@@ -94,7 +94,7 @@ public abstract class KinesisOutputOperatorTest< O extends AbstractKinesisOutput
 
     // Create ActiveMQStringSinglePortOutputOperator
     G generator = addGenerateOperator( dag );
-    
+
     O node = addTestingOperator( dag );
     configureTestingOperator( node );
 
@@ -119,13 +119,13 @@ public abstract class KinesisOutputOperatorTest< O extends AbstractKinesisOutput
       }
       catch( Exception e ){}
     }
-    
+
     if( listener != null )
       listener.setIsAlive(false);
-    
+
     if( listenerThread != null )
       listenerThread.join( 1000 );
-    
+
     lc.shutdown();
 
     // Check values send vs received
@@ -141,10 +141,10 @@ public abstract class KinesisOutputOperatorTest< O extends AbstractKinesisOutput
   protected KinesisTestConsumer createConsumerListener( String streamName )
   {
     KinesisTestConsumer listener = new KinesisTestConsumer(streamName);
-    
+
     return listener;
   }
-  
+
   protected void configureTestingOperator( O node )
   {
     node.setAccessKey(credentials.getCredentials().getAWSAccessKeyId());
@@ -152,7 +152,7 @@ public abstract class KinesisOutputOperatorTest< O extends AbstractKinesisOutput
     node.setBatchSize(500);
     node.setStreamName(streamName);
   }
-  
+
   protected abstract G addGenerateOperator( DAG dag );
   protected abstract DefaultOutputPort getOutputPortOfGenerator( G generator );
   protected abstract O addTestingOperator( DAG dag );

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisStringOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisStringOutputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisStringOutputOperatorTest.java
index 33f3179..f0a9eb7 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisStringOutputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisStringOutputOperatorTest.java
@@ -34,13 +34,13 @@ public class KinesisStringOutputOperatorTest extends KinesisOutputOperatorTest<
     return dag.addOperator("TestStringGenerator", StringGeneratorInputOperator.class);
     //StringGeneratorInputOperator generator =
   }
-  
+
   @Override
   protected DefaultOutputPort getOutputPortOfGenerator( StringGeneratorInputOperator generator )
   {
     return generator.outputPort;
   }
-  
+
   @Override
   protected KinesisStringOutputOperator addTestingOperator(DAG dag)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisTestConsumer.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisTestConsumer.java b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisTestConsumer.java
index 448ce72..a1547c1 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisTestConsumer.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/kinesis/KinesisTestConsumer.java
@@ -48,11 +48,11 @@ public class KinesisTestConsumer implements Runnable
 
   private volatile boolean isAlive = true;
   private int receiveCount = 0;
-  
+
   private CountDownLatch doneLatch;
-  
+
   protected static final int MAX_TRY_TIMES = 30;
-  
+
   private void createClient()
   {
     AWSCredentialsProvider credentials = new DefaultAWSCredentialsProviderChain();
@@ -87,16 +87,16 @@ public class KinesisTestConsumer implements Runnable
     buffer.get(bytes);
     return new String(bytes);
   }
-   
+
   @Override
   public void run()
   {
     String iterator = prepareIterator();
-    
-    while (isAlive ) 
+
+    while (isAlive )
     {
       iterator = processNextIterator(iterator);
-      
+
       //sleep at least 1 second to avoid exceeding the limit on getRecords frequency
       try
       {
@@ -167,7 +167,7 @@ public class KinesisTestConsumer implements Runnable
       return;
     receiveCount += records.size();
     logger.debug("ReceiveCount= {}", receiveCount);
-    
+
     for( Record record : records )
     {
       holdingBuffer.add(record);
@@ -175,18 +175,18 @@ public class KinesisTestConsumer implements Runnable
       {
         processRecord( record );
       }
-      
+
       if( doneLatch != null )
         doneLatch.countDown();
     }
-    
+
   }
-  
+
   protected void processRecord( Record record )
   {
-    
+
   }
-  
+
   public void close()
   {
     isAlive = false;

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/memcache/MemcachePOJOOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/memcache/MemcachePOJOOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/memcache/MemcachePOJOOperatorTest.java
index 60db81c..2ae525c 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/memcache/MemcachePOJOOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/memcache/MemcachePOJOOperatorTest.java
@@ -34,16 +34,16 @@ import net.spy.memcached.AddrUtil;
 public class MemcachePOJOOperatorTest
 {
   public static final int TUPLE_SIZE = 1000;
-  
+
   private MemcacheStore store;
-  
+
   @Before
   public void setup()
   {
     store = new MemcacheStore();
     store.setServerAddresses(AddrUtil.getAddresses("localhost:11211") );
   }
-  
+
   public void cleanup()
   {
     if( store != null )
@@ -57,9 +57,9 @@ public class MemcachePOJOOperatorTest
         DTThrowable.rethrow(e);
       }
     }
-      
+
   }
-  
+
   @SuppressWarnings("unchecked")
   @Test
   public void testMemcacheOutputOperatorInternal() throws Exception
@@ -74,21 +74,21 @@ public class MemcachePOJOOperatorTest
     operator.setTableInfo( tableInfo );
 
     operator.setup(null);
-    
+
     TupleGenerator<TestPOJO> generator = new TupleGenerator<TestPOJO>( TestPOJO.class );
-    
+
     for( int i=0; i<TUPLE_SIZE; ++i )
     {
       operator.processTuple( generator.getNextTuple() );
     }
-    
+
     readDataAndVerify( operator.getStore(), generator );
   }
-  
+
   public void readDataAndVerify( MemcacheStore store, TupleGenerator<TestPOJO> generator )
   {
     generator.reset();
-    
+
     for( int i=0; i<TUPLE_SIZE; ++i )
     {
       TestPOJO expected = generator.getNextTuple();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/memsql/AbstractMemsqlInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/memsql/AbstractMemsqlInputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/memsql/AbstractMemsqlInputOperatorTest.java
index aaa1e52..54c8d93 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/memsql/AbstractMemsqlInputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/memsql/AbstractMemsqlInputOperatorTest.java
@@ -51,7 +51,7 @@ public class AbstractMemsqlInputOperatorTest
   public static final int NUM_WINDOWS = 10;
   public static final int DATABASE_SIZE = NUM_WINDOWS * BLAST_SIZE;
   public static final int OPERATOR_ID = 0;
-  
+
   public static void populateDatabase(MemsqlStore memsqlStore)
   {
     memsqlStore.connect();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperatorBenchmark.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperatorBenchmark.java b/contrib/src/test/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperatorBenchmark.java
index 6122477..a128181 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperatorBenchmark.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/rabbitmq/RabbitMQOutputOperatorBenchmark.java
@@ -32,6 +32,6 @@ public class RabbitMQOutputOperatorBenchmark extends RabbitMQOutputOperatorTest
   public void testDag() throws Exception
   {
     runTest(100000);
-    logger.debug("end of test");  
+    logger.debug("end of test");
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/redis/RedisInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/redis/RedisInputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/redis/RedisInputOperatorTest.java
index 010c534..6dcdfbe 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/redis/RedisInputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/redis/RedisInputOperatorTest.java
@@ -136,7 +136,7 @@ public class RedisInputOperatorTest
 
     RedisKeyValueInputOperator operator = new RedisKeyValueInputOperator();
     operator.setWindowDataManager(new FSWindowDataManager());
-    
+
     operator.setStore(operatorStore);
     operator.setScanCount(1);
     Attribute.AttributeMap attributeMap = new Attribute.AttributeMap.DefaultAttributeMap();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkInputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkInputOperatorTest.java
index 654899a..32a4f39 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkInputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkInputOperatorTest.java
@@ -26,7 +26,7 @@ import com.datatorrent.lib.helper.OperatorContextTestHelper;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
 /**
- * 
+ *
  * Unit test for splunk input operator. The test, queries splunk server for 100 rows and checks
  * how many rows are returned.
  *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkTcpOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkTcpOutputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkTcpOutputOperatorTest.java
index 241f7e1..f1d9285 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkTcpOutputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/splunk/SplunkTcpOutputOperatorTest.java
@@ -25,7 +25,7 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
 import com.google.common.collect.Lists;
 
 /**
- * 
+ *
  * Unit test for splunk tcp output operator. The test sends 10 values to the splunk server and then
  * queries it for last 10 rows to check if the values are same or not.
  *

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/util/FieldValueSerializableGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/util/FieldValueSerializableGenerator.java b/contrib/src/test/java/com/datatorrent/contrib/util/FieldValueSerializableGenerator.java
index 2975c9c..7d34d71 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/util/FieldValueSerializableGenerator.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/util/FieldValueSerializableGenerator.java
@@ -35,18 +35,18 @@ import com.datatorrent.lib.util.PojoUtils.Setter;
 
 public class FieldValueSerializableGenerator< T extends FieldInfo> extends FieldValueGenerator<T>
 {
-  
+
   public static < T extends FieldInfo > FieldValueSerializableGenerator<T> getFieldValueGenerator(final Class<?> clazz, List<T> fieldInfos)
   {
     return new FieldValueSerializableGenerator(clazz, fieldInfos);
   }
-  
-  
+
+
   private static final Logger logger = LoggerFactory.getLogger( FieldValueGenerator.class );
   //it's better to same kryo instance for both de/serialize
   private Kryo _kryo = null;
   private Class<?> clazz;
-  
+
   private FieldValueSerializableGenerator(){}
 
   public FieldValueSerializableGenerator(Class<?> clazz, List<T> fieldInfos)
@@ -58,7 +58,7 @@ public class FieldValueSerializableGenerator< T extends FieldInfo> extends Field
   /**
    * get the object which is serialized.
    * this method will convert the object into a map from column name to column value and then serialize it
-   * 
+   *
    * @param obj
    * @return
    */
@@ -66,7 +66,7 @@ public class FieldValueSerializableGenerator< T extends FieldInfo> extends Field
   {
   //if don't have field information, just convert the whole object to byte[]
     Object convertObj = obj;
-    
+
     //if fields are specified, convert to map and then convert map to byte[]
     if( fieldGetterMap != null && !fieldGetterMap.isEmpty() )
     {
@@ -82,15 +82,15 @@ public class FieldValueSerializableGenerator< T extends FieldInfo> extends Field
     return os.toByteArray();
   }
 
-  
+
   public Object deserializeObject( byte[] bytes )
   {
     Object obj = getKryo().readClassAndObject( new Input( bytes ) );
-    
+
 
     if( fieldGetterMap == null || fieldGetterMap.isEmpty() )
       return obj;
-    
+
     // the obj in fact is a map, convert from map to object
     try
     {
@@ -114,7 +114,7 @@ public class FieldValueSerializableGenerator< T extends FieldInfo> extends Field
       return obj;
     }
   }
-  
+
   protected Kryo getKryo()
   {
     if( _kryo == null )

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/util/POJOTupleGenerateOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/util/POJOTupleGenerateOperator.java b/contrib/src/test/java/com/datatorrent/contrib/util/POJOTupleGenerateOperator.java
index e2fc5cb..d9f5079 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/util/POJOTupleGenerateOperator.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/util/POJOTupleGenerateOperator.java
@@ -32,7 +32,7 @@ public class POJOTupleGenerateOperator<T> implements InputOperator, ActivationLi
 {
   protected final int DEFAULT_TUPLE_NUM = 10000;
   public final transient DefaultOutputPort<T> outputPort = new DefaultOutputPort<T>();
-  
+
   private int tupleNum = DEFAULT_TUPLE_NUM;
   private int batchNum = 5;
   private TupleGenerator<T> tupleGenerator = null;
@@ -42,17 +42,17 @@ public class POJOTupleGenerateOperator<T> implements InputOperator, ActivationLi
   public POJOTupleGenerateOperator()
   {
   }
-  
+
   public POJOTupleGenerateOperator( Class<T> tupleClass )
   {
     this.tupleClass = tupleClass;
   }
-  
+
   public void setTupleType( Class<T> tupleClass )
   {
     this.tupleClass = tupleClass;
   }
-  
+
   @Override
   public void beginWindow(long windowId)
   {
@@ -96,39 +96,39 @@ public class POJOTupleGenerateOperator<T> implements InputOperator, ActivationLi
       catch( Exception e ){}
       return;
     }
-      
-    
+
+
     for( int i=0; i<batchNum; ++i )
     {
       int count = emitedTuples.get();
       if( count >= theTupleNum )
         return;
-      
+
       if( emitedTuples.compareAndSet(count, count+1) )
       {
-        T tuple = getNextTuple();        
+        T tuple = getNextTuple();
         outputPort.emit ( tuple );
         tupleEmitted( tuple );
-        
+
         if( count+1 == theTupleNum )
         {
           tupleEmitDone();
           return;
         }
       }
-      
+
     }
   }
-  
-  
+
+
   protected void tupleEmitted( T tuple ){}
   protected void tupleEmitDone(){}
-  
+
   public int getEmitedTupleCount()
   {
     return emitedTuples.get();
   }
-  
+
   public int getTupleNum()
   {
     return tupleNum;
@@ -137,7 +137,7 @@ public class POJOTupleGenerateOperator<T> implements InputOperator, ActivationLi
   {
     this.tupleNum = tupleNum;
   }
-  
+
   protected T getNextTuple()
   {
     if( tupleGenerator == null )

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/util/TestPOJO.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/util/TestPOJO.java b/contrib/src/test/java/com/datatorrent/contrib/util/TestPOJO.java
index 462c0b3..99910be 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/util/TestPOJO.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/util/TestPOJO.java
@@ -40,15 +40,15 @@ public class TestPOJO implements Serializable
     fieldsInfo.add( new FieldInfo( "name", "name", SupportType.STRING ) );
     fieldsInfo.add( new FieldInfo( "age", "age", SupportType.INTEGER ) );
     fieldsInfo.add( new FieldInfo( "address", "address", SupportType.STRING ) );
-    
+
     return fieldsInfo;
   }
-  
+
   public static String getRowExpression()
   {
     return "row";
   }
-  
+
   public static TestPOJO from( Map<String,byte[]> map )
   {
     TestPOJO testPOJO = new TestPOJO();
@@ -58,14 +58,14 @@ public class TestPOJO implements Serializable
     }
     return testPOJO;
   }
-  
+
   private Long rowId = null;
   private String name;
   private int age;
   private String address;
 
   public TestPOJO(){}
-  
+
   public TestPOJO(long rowId)
   {
     this(rowId, "name" + rowId, (int) rowId, "address" + rowId);
@@ -78,7 +78,7 @@ public class TestPOJO implements Serializable
     setAge(age);
     setAddress(address);
   }
-  
+
   public void setValue( String fieldName, byte[] value )
   {
     if( "row".equalsIgnoreCase(fieldName) )
@@ -148,7 +148,7 @@ public class TestPOJO implements Serializable
   {
     this.address = address;
   }
-  
+
   @Override
   public boolean equals( Object obj )
   {
@@ -156,7 +156,7 @@ public class TestPOJO implements Serializable
       return false;
     if( !( obj instanceof TestPOJO ) )
       return false;
-    
+
     return completeEquals( (TestPOJO)obj );
   }
 
@@ -172,7 +172,7 @@ public class TestPOJO implements Serializable
       return false;
     return true;
   }
-  
+
   public boolean completeEquals( TestPOJO other )
   {
     if( other == null )
@@ -183,7 +183,7 @@ public class TestPOJO implements Serializable
       return false;
     return true;
   }
-  
+
   public <T> boolean fieldEquals( T v1, T v2 )
   {
     if( v1 == null && v2 == null )
@@ -192,7 +192,7 @@ public class TestPOJO implements Serializable
       return false;
     return v1.equals( v2 );
   }
-  
+
   @Override
   public String toString()
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/util/TupleCacheOutputOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/util/TupleCacheOutputOperator.java b/contrib/src/test/java/com/datatorrent/contrib/util/TupleCacheOutputOperator.java
index 93dc189..4bd8c79 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/util/TupleCacheOutputOperator.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/util/TupleCacheOutputOperator.java
@@ -33,10 +33,10 @@ public class TupleCacheOutputOperator<T>  extends BaseOperator
 {
   private static final long serialVersionUID = 3090932382383138500L;
   private static final Logger logger = LoggerFactory.getLogger( TupleCacheOutputOperator.class );
-  
-  //one instance of TupleCacheOutputOperator map to one 
+
+  //one instance of TupleCacheOutputOperator map to one
   private static Map< String, List<?> > receivedTuplesMap = new HashMap< String, List<?>>();
-  
+
   public final transient DefaultInputPort<T> inputPort = new DefaultInputPort<T>() {
 
     @Override
@@ -45,14 +45,14 @@ public class TupleCacheOutputOperator<T>  extends BaseOperator
       processTuple( tuple );
     }
   };
-  
+
   private String uuid;
-  
+
   public TupleCacheOutputOperator()
   {
     uuid = java.util.UUID.randomUUID().toString();
   }
-  
+
   public String getUuid()
   {
     return uuid;
@@ -74,7 +74,7 @@ public class TupleCacheOutputOperator<T>  extends BaseOperator
   {
     return (List<T>)receivedTuplesMap.get(uuid);
   }
-  
+
   public static List<Object> getReceivedTuples( String uuid )
   {
     return (List<Object>)receivedTuplesMap.get(uuid);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerateCacheOperator.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerateCacheOperator.java b/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerateCacheOperator.java
index bfce6f5..8ee38dd 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerateCacheOperator.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerateCacheOperator.java
@@ -25,16 +25,16 @@ import java.util.Map;
 
 public class TupleGenerateCacheOperator<T> extends POJOTupleGenerateOperator<T>
 {
-  //one instance of TupleCacheOutputOperator map to one 
+  //one instance of TupleCacheOutputOperator map to one
   private static Map< String, List<?> > emittedTuplesMap = new HashMap< String, List<?>>();
 
   private String uuid;
-  
+
   public TupleGenerateCacheOperator()
   {
     uuid = java.util.UUID.randomUUID().toString();
   }
-  
+
   @SuppressWarnings("unchecked")
   protected void tupleEmitted( T tuple )
   {
@@ -46,7 +46,7 @@ public class TupleGenerateCacheOperator<T> extends POJOTupleGenerateOperator<T>
     }
     emittedTuples.add(tuple);
   }
-  
+
   @SuppressWarnings("unchecked")
   public List<T> getTuples()
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerator.java b/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerator.java
index cea81d5..844d31d 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerator.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/util/TupleGenerator.java
@@ -25,26 +25,26 @@ import org.slf4j.LoggerFactory;
 
 /**
  * This is a copy from contrib, should be merged later.
- * 
+ *
  */
 public class TupleGenerator<T>
 {
   private static final Logger logger = LoggerFactory.getLogger( TupleGenerator.class );
-      
+
   private volatile long rowId = 0;
   private Constructor<T> constructor;
-  
+
   private static Class<?>[] paramTypes = new Class<?>[]{ Long.class, long.class, Integer.class, int.class };
-  
+
   public TupleGenerator()
   {
   }
-  
+
   public TupleGenerator( Class<T> tupleClass )
   {
     useTupleClass( tupleClass );
   }
-  
+
   public void useTupleClass( Class<T> tupleClass )
   {
     for( Class<?> paramType : paramTypes )
@@ -59,7 +59,7 @@ public class TupleGenerator<T>
       throw new RuntimeException( "Not found proper constructor." );
     }
   }
-  
+
   protected Constructor<T> tryGetConstructor( Class<T> tupleClass, Class<?> parameterType )
   {
     try
@@ -71,17 +71,17 @@ public class TupleGenerator<T>
       return null;
     }
   }
-  
+
   public void reset()
   {
     rowId = 0;
   }
-  
+
   public T getNextTuple()
   {
     if( constructor == null )
       throw new RuntimeException( "Not found proper constructor." );
-    
+
     long curRowId = ++rowId;
     try
     {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQInputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQInputOperatorTest.java
index 3538891..1bb599a 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQInputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQInputOperatorTest.java
@@ -96,7 +96,7 @@ public class ZeroMQInputOperatorTest
               Thread.sleep(10);
             } else {
               break;
-            }			            
+            }
           }
         }
         catch (InterruptedException ex) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageGenerator.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageGenerator.java b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageGenerator.java
index 27cd278..6155ec3 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageGenerator.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageGenerator.java
@@ -51,7 +51,7 @@ class ZeroMQMessageGenerator {
 
   public void send(Object message)
   {
-    String msg = message.toString();   
+    String msg = message.toString();
     publisher.send(msg.getBytes(), 0);
   }
 
@@ -72,8 +72,8 @@ class ZeroMQMessageGenerator {
       ArrayList<HashMap<String, Integer>>  dataMaps = MessageQueueTestHelper.getMessages();
       for(int j =0; j < dataMaps.size(); j++)
       {
-        send(dataMaps.get(j));  
-      }     
+        send(dataMaps.get(j));
+      }
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageReceiver.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageReceiver.java b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageReceiver.java
index f472828..bb04817 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageReceiver.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQMessageReceiver.java
@@ -56,9 +56,9 @@ final class ZeroMQMessageReceiver implements Runnable
   @Override
   public void run()
   {
-    logger.debug("receiver running");      
+    logger.debug("receiver running");
     while (!Thread.currentThread().isInterrupted() && !shutDown) {
-    	//logger.debug("receiver running in loop"); 
+    	//logger.debug("receiver running in loop");
       byte[] msg = subscriber.recv(ZMQ.NOBLOCK);
       // convert to HashMap and save the values for each key
       // then expect c to be 1000, b=20, a=2
@@ -68,7 +68,7 @@ final class ZeroMQMessageReceiver implements Runnable
     	  continue;
       }
       String str = new String(msg);
-      
+
       if (str.indexOf("{") == -1) {
         continue;
       }
@@ -85,7 +85,7 @@ final class ZeroMQMessageReceiver implements Runnable
   public void teardown()
   {
 	shutDown=true;
-	
+
 	syncclient.close();
     subscriber.close();
     context.term();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQOutputOperatorTest.java b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQOutputOperatorTest.java
index b8332c0..41c5248 100644
--- a/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQOutputOperatorTest.java
+++ b/contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQOutputOperatorTest.java
@@ -47,7 +47,7 @@ public class ZeroMQOutputOperatorTest
     final int testNum = 3;
 
     runTest(testNum);
-    
+
     logger.debug("end of test");
   }
 
@@ -60,7 +60,7 @@ public class ZeroMQOutputOperatorTest
     collector.setUrl("tcp://*:5556");
     collector.setSyncUrl("tcp://*:5557");
     collector.setSUBSCRIBERS_EXPECTED(1);
-    
+
     dag.addStream("Stream", source.outPort, collector.inputPort).setLocality(Locality.CONTAINER_LOCAL);
 
     final LocalMode.Controller lc = lma.getController();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/FullOuterJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/FullOuterJoinOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/FullOuterJoinOperatorTest.java
index 762d322..ce5ba33 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/FullOuterJoinOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/FullOuterJoinOperatorTest.java
@@ -41,11 +41,11 @@ public class FullOuterJoinOperatorTest
     CollectorTestSink sink = new CollectorTestSink();
     oper.outport.setSink(sink);
 
-    // set column join condition  
+    // set column join condition
     Condition cond = new JoinColumnEqualCondition("a", "a");
     oper.setJoinCondition(cond);
-    
-    // add columns  
+
+    // add columns
     oper.selectTable1Column(new ColumnIndex("b", null));
     oper.selectTable2Column(new ColumnIndex("c", null));
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/GroupByOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/GroupByOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/GroupByOperatorTest.java
index 6ad818e..0d2a2f5 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/GroupByOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/GroupByOperatorTest.java
@@ -85,7 +85,7 @@ public class GroupByOperatorTest
     tuple.put("b", 2);
     tuple.put("c", 7);
     oper.inport.process(tuple);
-    
+
     oper.endWindow();
     oper.teardown();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/HavingOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/HavingOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/HavingOperatorTest.java
index 5b696f1..3c685ab 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/HavingOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/HavingOperatorTest.java
@@ -87,7 +87,7 @@ public class HavingOperatorTest
     tuple.put("b", 2);
     tuple.put("c", 7);
     oper.inport.process(tuple);
-    
+
     oper.endWindow();
     oper.teardown();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/InnerJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/InnerJoinOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/InnerJoinOperatorTest.java
index 8b4f923..18312d1 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/InnerJoinOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/InnerJoinOperatorTest.java
@@ -30,7 +30,7 @@ import com.datatorrent.lib.streamquery.index.ColumnIndex;
 import com.datatorrent.lib.testbench.CollectorTestSink;
 
 /**
- * 
+ *
  * Functional test for {@link com.datatorrent.lib.streamquery.InnerJoinOperator }.
  * @deprecated
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/LeftOuterJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/LeftOuterJoinOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/LeftOuterJoinOperatorTest.java
index f78ba21..eb1ec6d 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/LeftOuterJoinOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/LeftOuterJoinOperatorTest.java
@@ -41,11 +41,11 @@ public class LeftOuterJoinOperatorTest
     CollectorTestSink sink = new CollectorTestSink();
     oper.outport.setSink(sink);
 
-    // set column join condition  
+    // set column join condition
     Condition cond = new JoinColumnEqualCondition("a", "a");
     oper.setJoinCondition(cond);
-    
-    // add columns  
+
+    // add columns
     oper.selectTable1Column(new ColumnIndex("b", null));
     oper.selectTable2Column(new ColumnIndex("c", null));
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/RightOuterJoinOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/RightOuterJoinOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/RightOuterJoinOperatorTest.java
index 8142276..70bc031 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/RightOuterJoinOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/RightOuterJoinOperatorTest.java
@@ -42,11 +42,11 @@ public class RightOuterJoinOperatorTest
     CollectorTestSink sink = new CollectorTestSink();
     oper.outport.setSink(sink);
 
-    // set column join condition  
+    // set column join condition
     Condition cond = new JoinColumnEqualCondition("a", "a");
     oper.setJoinCondition(cond);
-    
-    // add columns  
+
+    // add columns
     oper.selectTable1Column(new ColumnIndex("b", null));
     oper.selectTable2Column(new ColumnIndex("c", null));
 
@@ -83,7 +83,7 @@ public class RightOuterJoinOperatorTest
     tuple.put("b", 11);
     tuple.put("c", 12);
     oper.inport2.process(tuple);
-    
+
     oper.endWindow();
     oper.teardown();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/SelectTopOperatorTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/SelectTopOperatorTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/SelectTopOperatorTest.java
index 90480cf..4b609c1 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/SelectTopOperatorTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/SelectTopOperatorTest.java
@@ -37,20 +37,20 @@ public class SelectTopOperatorTest
     oper.setTopValue(2);
     CollectorTestSink sink = new CollectorTestSink();
     oper.outport.setSink(sink);
-    
+
     oper.beginWindow(1);
     HashMap<String, Object> tuple = new HashMap<String, Object>();
     tuple.put("a", 0);
     tuple.put("b", 1);
     tuple.put("c", 2);
     oper.inport.process(tuple);
-    
+
     tuple = new HashMap<String, Object>();
     tuple.put("a", 1);
     tuple.put("b", 3);
     tuple.put("c", 4);
     oper.inport.process(tuple);
-    
+
     tuple = new HashMap<String, Object>();
     tuple.put("a", 1);
     tuple.put("b", 5);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/BetweenConditionTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/BetweenConditionTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/BetweenConditionTest.java
index 01465db..568aed9 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/BetweenConditionTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/BetweenConditionTest.java
@@ -79,7 +79,7 @@ public class BetweenConditionTest
     tuple.put("b", 7);
     tuple.put("c", 8);
     oper.inport.process(tuple);
-    
+
     oper.endWindow();
     oper.teardown();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/CompoundConditionTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/CompoundConditionTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/CompoundConditionTest.java
index e160e5d..929d134 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/CompoundConditionTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/CompoundConditionTest.java
@@ -84,7 +84,7 @@ public class CompoundConditionTest
     tuple.put("b", 7);
     tuple.put("c", 8);
     oper.inport.process(tuple);
-    
+
     oper.endWindow();
     oper.teardown();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/InConditionTest.java
----------------------------------------------------------------------
diff --git a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/InConditionTest.java b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/InConditionTest.java
index d641a1c..255389b 100644
--- a/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/InConditionTest.java
+++ b/contrib/src/test/java/org/apache/apex/malhar/contrib/misc/streamquery/advanced/InConditionTest.java
@@ -81,7 +81,7 @@ public class InConditionTest
     tuple.put("b", 7);
     tuple.put("c", 8);
     oper.inport.process(tuple);
-    
+
     oper.endWindow();
     oper.teardown();
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/AverageData.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/AverageData.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/AverageData.java
index b9132d8..3c74cc5 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/AverageData.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/AverageData.java
@@ -45,7 +45,7 @@ public class AverageData
 
   /**
    * This constructor takes the value of sum and count and initialize the local attributes to corresponding values
-   * 
+   *
    * @param count
    *          the value of count
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineInfo.java
----------------------------------------------------------------------
diff --git a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineInfo.java b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineInfo.java
index a5dda7e..6f02a24 100644
--- a/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineInfo.java
+++ b/demos/machinedata/src/main/java/com/datatorrent/demos/machinedata/data/MachineInfo.java
@@ -42,7 +42,7 @@ public class MachineInfo
 
   /**
    * This constructor takes MachineKey as input and initialize local attributes
-   * 
+   *
    * @param machineKey
    *          the MachineKey instance
    */
@@ -53,7 +53,7 @@ public class MachineInfo
 
   /**
    * This constructor takes MachineKey, cpu usage, ram usage, hdd usage as input and initialize local attributes
-   * 
+   *
    * @param machineKey
    *          the MachineKey instance
    * @param cpu
@@ -73,7 +73,7 @@ public class MachineInfo
 
   /**
    * This method returns the MachineKey
-   * 
+   *
    * @return
    */
   public MachineKey getMachineKey()
@@ -83,7 +83,7 @@ public class MachineInfo
 
   /**
    * This method sets the MachineKey
-   * 
+   *
    * @param machineKey
    *          the MachineKey instance
    */
@@ -94,7 +94,7 @@ public class MachineInfo
 
   /**
    * This method returns the CPU% usage
-   * 
+   *
    * @return
    */
   public int getCpu()
@@ -104,7 +104,7 @@ public class MachineInfo
 
   /**
    * This method sets the CPU% usage
-   * 
+   *
    * @param cpu
    *          the CPU% usage
    */
@@ -115,7 +115,7 @@ public class MachineInfo
 
   /**
    * This method returns the RAM% usage
-   * 
+   *
    * @return
    */
   public int getRam()
@@ -125,7 +125,7 @@ public class MachineInfo
 
   /**
    * This method sets the RAM% usage
-   * 
+   *
    * @param ram
    *          the RAM% usage
    */
@@ -136,7 +136,7 @@ public class MachineInfo
 
   /**
    * This method returns the HDD% usage
-   * 
+   *
    * @return
    */
   public int getHdd()
@@ -146,7 +146,7 @@ public class MachineInfo
 
   /**
    * This method sets the HDD% usage
-   * 
+   *
    * @param hdd
    *          the HDD% usage
    */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
----------------------------------------------------------------------
diff --git a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
index 8964d84..f6708ba 100644
--- a/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
+++ b/demos/mobile/src/main/java/com/datatorrent/demos/mobile/PhoneEntryOperator.java
@@ -59,8 +59,8 @@ public class PhoneEntryOperator extends BaseOperator
 
   /**
    * Sets the initial number of phones to display on the google map.
-   * 
-   * @param i the count of initial phone numbers to display 
+   *
+   * @param i the count of initial phone numbers to display
    */
   public void setInitialDisplayCount(int i)
   {
@@ -69,8 +69,8 @@ public class PhoneEntryOperator extends BaseOperator
 
   /**
    * Sets the range for the phone numbers generated by the operator.
-   * 
-   * @param i the range within which the phone numbers are randomly generated. 
+   *
+   * @param i the range within which the phone numbers are randomly generated.
    */
   public void setPhoneRange(Range<Integer> phoneRange)
   {
@@ -80,7 +80,7 @@ public class PhoneEntryOperator extends BaseOperator
 
   /**
    * Sets the max seed for random phone number generation
-   * 
+   *
    * @param i the number to initialize the random number phone generator.
    */
   public void setMaxSeedPhoneNumber(int number)


[2/6] apex-malhar git commit: Fix trailing whitespace.

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
index e5a2832..acbf93b 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/FileSplitterInputTest.java
@@ -141,7 +141,7 @@ public class FileSplitterInputTest
 
   @Rule
   public TestMeta testMeta = new TestMeta();
-  
+
   private void window1TestHelper() throws InterruptedException
   {
     testMeta.fileSplitterInput.beginWindow(1);
@@ -157,7 +157,7 @@ public class FileSplitterInputTest
       Assert.assertTrue("path: " + metadata.getFilePath(), testMeta.filePaths.contains(metadata.getFilePath()));
       Assert.assertNotNull("name: ", metadata.getFileName());
     }
-    
+
     testMeta.fileMetadataSink.collectedTuples.clear();
   }
 
@@ -256,7 +256,7 @@ public class FileSplitterInputTest
 
     testMeta.fileSplitterInput = KryoCloneUtils.cloneObject(testMeta.fileSplitterInput);
     testMeta.resetSinks();
-    
+
     testMeta.fileSplitterInput.setup(testMeta.context);
     testMeta.fileSplitterInput.beginWindow(1);
     Assert.assertEquals("Blocks", 12, testMeta.blockMetadataSink.collectedTuples.size());
@@ -325,22 +325,22 @@ public class FileSplitterInputTest
     Assert.assertEquals("window 2: blocks", 1, testMeta.blockMetadataSink.collectedTuples.size());
     testMeta.fileSplitterInput.teardown();
   }
-  
+
   private void blocksTestHelper() throws InterruptedException
   {
     testMeta.fileSplitterInput.beginWindow(1);
     testMeta.scanner.semaphore.acquire();
     testMeta.fileSplitterInput.emitTuples();
     testMeta.fileSplitterInput.endWindow();
-    
+
     Assert.assertEquals("Blocks", 10, testMeta.blockMetadataSink.collectedTuples.size());
-    
+
     for (int window = 2; window < 8; window++) {
       testMeta.fileSplitterInput.beginWindow(window);
       testMeta.fileSplitterInput.emitTuples();
       testMeta.fileSplitterInput.endWindow();
     }
-    
+
     int noOfBlocks = 0;
     for (int i = 0; i < 12; i++) {
       File testFile = new File(testMeta.dataDirectory, "file" + i + ".txt");
@@ -384,7 +384,7 @@ public class FileSplitterInputTest
   {
     FSWindowDataManager fsWindowDataManager = new FSWindowDataManager();
     testMeta.updateConfig(fsWindowDataManager, 500, 2L, 10);
-    
+
     testMeta.fileSplitterInput.setup(testMeta.context);
     recoveryTestHelper();
     testMeta.fileSplitterInput.teardown();
@@ -398,7 +398,7 @@ public class FileSplitterInputTest
     testMeta.fileSplitterInput.setup(testMeta.context);
 
     recoveryTestHelper();
-    
+
     Thread.sleep(1000);
     HashSet<String> lines = Sets.newHashSet();
     for (int line = 2; line < 4; line++) {
@@ -535,7 +535,7 @@ public class FileSplitterInputTest
   {
     FSWindowDataManager fsWindowDataManager = new FSWindowDataManager();
     testMeta.updateConfig(fsWindowDataManager, 500L, 2L, 2);
-    
+
     testMeta.fileSplitterInput.setup(testMeta.context);
     testMeta.fileSplitterInput.beginWindow(1);
 
@@ -613,7 +613,7 @@ public class FileSplitterInputTest
 
     Assert.assertEquals("window 2: files", 0, testMeta.fileMetadataSink.collectedTuples.size());
     Assert.assertEquals("window 2: blocks", 0, testMeta.blockMetadataSink.collectedTuples.size());
-    
+
     testMeta.fileSplitterInput.teardown();
   }
 
@@ -694,6 +694,6 @@ public class FileSplitterInputTest
       super.scanIterationComplete();
     }
   }
-  
+
   private static final Logger LOG = LoggerFactory.getLogger(FileSplitterInputTest.class);
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/SynchronizerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/SynchronizerTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/SynchronizerTest.java
index b017f20..7bdee4f 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/SynchronizerTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/SynchronizerTest.java
@@ -39,7 +39,7 @@ public class SynchronizerTest
   public static final String[] FILE_NAMES = {"a.txt", "b.txt", "c.txt", "d.txt", "e.txt" };
 
   public static final long[][] BLOCK_IDS = {
-      //Block ids for file1 (a.txt) 
+      //Block ids for file1 (a.txt)
       {1001, 1002, 1003 },
       //Block ids for file2 (b.txt)
       {1004, 1005, 1006, 1007 },

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java b/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
index 34ffecd..f273e4d 100644
--- a/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/io/fs/TailFsInputOperatorTest.java
@@ -192,7 +192,7 @@ public class TailFsInputOperatorTest
 
   /**
    * This tests the case when the file is rotated and new file has same size as old file
-   * 
+   *
    * @throws Exception
    */
 
@@ -231,7 +231,7 @@ public class TailFsInputOperatorTest
     oper.deactivate();
     file = new File(filePath);
     if (file.exists()) {
-      file.delete();      
+      file.delete();
     }
     file = new File(filePath + ".bk");
     if (file.exists()) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
index 532aef5..dfd9627 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/FilteredLineToTokenHashMapTest.java
@@ -28,7 +28,7 @@ import org.junit.Test;
 import com.datatorrent.lib.testbench.HashTestSink;
 
 /**
- * 
+ *
  * Functional tests for
  * {@link com.datatorrent.lib.logs.FilteredLineToTokenHashMap}
  */

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
index 92575b7..ef38f51 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenArrayListTest.java
@@ -29,10 +29,10 @@ import org.junit.Test;
 import com.datatorrent.lib.testbench.ArrayListTestSink;
 
 /**
- * 
+ *
  * Functional tests for {@link com.datatorrent.lib.logs.LineToTokenArrayList}
  * <p>
- * 
+ *
  */
 public class LineToTokenArrayListTest
 {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
index 20a799e..e9ab8fb 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/LineToTokenHashMapTest.java
@@ -28,7 +28,7 @@ import org.junit.Test;
 import com.datatorrent.lib.testbench.HashTestSink;
 
 /**
- * 
+ *
  * Functional tests for {@link com.datatorrent.lib.logs.LineToTokenHashMap}.
  */
 public class LineToTokenHashMapTest

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java b/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
index 7b6613b..1382c55 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/LineTokenizerKeyValTest.java
@@ -27,7 +27,7 @@ import org.junit.Test;
 import com.datatorrent.lib.testbench.HashTestSink;
 
 /**
- * 
+ *
  * Functional tests for {@link com.datatorrent.lib.logs.LineTokenizerKeyVal}.
  */
 public class LineTokenizerKeyValTest

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java b/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
index 94845a7..11d5622 100644
--- a/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
+++ b/library/src/test/java/com/datatorrent/lib/logs/MultiWindowDimensionAggregationTest.java
@@ -37,7 +37,7 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  * <p>
  * MultiWindowDimensionAggregationTest class.
  * </p>
- * 
+ *
  */
 public class MultiWindowDimensionAggregationTest
 {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java b/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
index 07a378c..28866a8 100644
--- a/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/MarginMapTest.java
@@ -29,10 +29,10 @@ import org.slf4j.LoggerFactory;
 import com.datatorrent.lib.testbench.CountAndLastTupleTestSink;
 
 /**
- * 
+ *
  * Functional tests for {@link com.datatorrent.lib.math.MarginMap}
  * <p>
- * 
+ *
  */
 public class MarginMapTest
 {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java b/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
index f74e0c9..8608a96 100644
--- a/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
+++ b/library/src/test/java/com/datatorrent/lib/math/SigmaTest.java
@@ -29,7 +29,7 @@ import com.datatorrent.lib.testbench.SumTestSink;
  *
  * Functional tests for {@link com.datatorrent.lib.math.Sigma}
  * <p>
- * 
+ *
  */
 
 public class SigmaTest

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java b/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
index 10e0f5e..8030d97 100644
--- a/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
+++ b/library/src/test/java/com/datatorrent/lib/multiwindow/SortedMovingWindowTest.java
@@ -38,7 +38,7 @@ import com.datatorrent.lib.testbench.CollectorTestSink;
  * 1. sort simple comparable tuples
  * 2. sort tuples by given order (Comparator)
  * 3. group tuples into different category and sort the category by a given order
- * 
+ *
  */
 public class SortedMovingWindowTest
 {
@@ -53,20 +53,20 @@ public class SortedMovingWindowTest
     CollectorTestSink<Object> testSink = new CollectorTestSink<Object>();
     smw.outputPort.setSink(testSink);
     smw.setup(null);
-    
+
     smw.setWindowSize(2);
-    // The incoming 6 integer tuples are disordered among 4 windows 
+    // The incoming 6 integer tuples are disordered among 4 windows
     emitObjects(smw, new Integer[][]{{1,3}, {2,5}, {4}, {6}});
     smw.beginWindow(4);
     smw.endWindow();
     smw.beginWindow(5);
     smw.endWindow();
-    
+
     // The outcome is sorted
     Assert.assertEquals(Lists.newArrayList(1, 2, 3, 4, 5, 6), testSink.collectedTuples);
-    
+
   }
-  
+
   /**
    * Given sorting key, sorting function, test sorting the map tuples within the sliding window
    */
@@ -92,7 +92,7 @@ public class SortedMovingWindowTest
     smw.setup(null);
     smw.setWindowSize(2);
 
-    // The incoming 6 simple map tuples are disordered among 4 windows 
+    // The incoming 6 simple map tuples are disordered among 4 windows
     emitObjects(smw, new Map[][]{createHashMapTuples(keys, new Integer[][]{{1}, {3}}),
         createHashMapTuples(keys, new Integer[][]{{2}, {5}}),
         createHashMapTuples(keys, new Integer[][]{{4}}), createHashMapTuples(keys, new Integer[][]{{6}})});
@@ -105,8 +105,8 @@ public class SortedMovingWindowTest
     Assert.assertEquals(Arrays.asList(createHashMapTuples(keys, new Integer[][]{{1}, {2}, {3}, {4}, {5}, {6}})),
         testSink.collectedTuples);
   }
-  
-  
+
+
   /**
    * Given grouping key, sorting key and sorting function, test sorting the map tuples within the sliding window
    */
@@ -127,7 +127,7 @@ public class SortedMovingWindowTest
         return (Integer)o1.get(keys[1]) - (Integer)o2.get(keys[1]);
       }
     });
-    
+
     smw.setFunction(new Function<Map<String,Object>, String>()
     {
       @Override
@@ -142,7 +142,7 @@ public class SortedMovingWindowTest
     smw.setup(null);
     smw.setWindowSize(2);
 
-    // The incoming 9 complex map tuples are disordered with same name among 4 windows 
+    // The incoming 9 complex map tuples are disordered with same name among 4 windows
     emitObjects(smw, new Map[][]{createHashMapTuples(keys, new Object[][]{{"bob", 1}, {"jim", 1}}),
         createHashMapTuples(keys, new Object[][]{{"jim", 2}, {"bob", 3}}),
         createHashMapTuples(keys, new Object[][]{{"bob", 2}, {"jim", 4}}),
@@ -156,7 +156,7 @@ public class SortedMovingWindowTest
     Assert.assertEquals(Arrays.asList(createHashMapTuples(keys,
         new Object[][]{{"bob", 1}, {"jim", 1}, {"jim", 2}, {"bob", 2}, {"bob", 3}, {"jim", 3}, {"jim", 4}, {"bob", 4}, {"bob", 5}})), testSink.collectedTuples);
   }
-  
+
   @SuppressWarnings({"rawtypes", "unchecked"})
   private void emitObjects(SortedMovingWindow win, Object[][] obj)
   {
@@ -168,11 +168,11 @@ public class SortedMovingWindowTest
       win.endWindow();
     }
   }
-  
+
   @SuppressWarnings({"rawtypes", "unchecked"})
   private Map[] createHashMapTuples(String[] cols, Object[][] values)
   {
-    
+
     HashMap[] maps = new HashMap[values.length];
     int index = -1;
     for (Object[] vs : values) {
@@ -182,8 +182,8 @@ public class SortedMovingWindowTest
         maps[index].put(cols[colIndex++], value);
       }
     }
-    
+
     return maps;
   }
-  
+
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/parser/XmlParserTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/parser/XmlParserTest.java b/library/src/test/java/com/datatorrent/lib/parser/XmlParserTest.java
index 5ce07f5..670d5b6 100644
--- a/library/src/test/java/com/datatorrent/lib/parser/XmlParserTest.java
+++ b/library/src/test/java/com/datatorrent/lib/parser/XmlParserTest.java
@@ -209,7 +209,7 @@ public class XmlParserTest
   @Test
   public void testXmlToPojoIncorrectXML()
   {
-    String tuple = "<EmployeeBean>" + "<firstname>john</firstname>" //incorrect field name is ignored by JAXB 
+    String tuple = "<EmployeeBean>" + "<firstname>john</firstname>" //incorrect field name is ignored by JAXB
         + "<dept>cs</dept>" + "<eid>1</eid>" + "<dateOfJoining>2015-01-01 00:00:00.00 IST</dateOfJoining>";
     // + "</EmployeeBean>"; // Incorrect XML format
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
index 47fa2c2..b7979c6 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/MeridianOperatorTest.java
@@ -43,7 +43,7 @@ public class MeridianOperatorTest
     oper.data.process(3.0);
     oper.data.process(9.0);
     oper.endWindow();
-    
+
     Assert.assertEquals("Must be one tuple in sink", sink.collectedTuples.size(), 1);
     Assert.assertTrue("Median value", sink.collectedTuples.get(0).doubleValue() == 5.0);
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
index 26e94c6..d38766e 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/ModeOperatorTest.java
@@ -45,7 +45,7 @@ public class ModeOperatorTest
     oper.data.process(7.0);
     oper.data.process(7.0);
     oper.endWindow();
-    
+
     Assert.assertEquals("Must be one tuple in sink", sink.collectedTuples.size(), 1);
     Assert.assertTrue("Median value", sink.collectedTuples.get(0) == 7.0);
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/statistics/StandardDeviationOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/StandardDeviationOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/StandardDeviationOperatorTest.java
index 3d2d99c..b32f202 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/StandardDeviationOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/StandardDeviationOperatorTest.java
@@ -36,7 +36,7 @@ public class StandardDeviationOperatorTest
     oper.variance.setSink(variance);
     CollectorTestSink<Object> deviation = new CollectorTestSink<Object>();
     oper.standardDeviation.setSink(deviation);
-    
+
     oper.setup(null);
     oper.beginWindow(0);
     oper.data.process(1.0);
@@ -44,7 +44,7 @@ public class StandardDeviationOperatorTest
     oper.data.process(3.0);
     oper.data.process(9.0);
     oper.endWindow();
-    
+
     Assert.assertEquals("Must be one tuple in sink", variance.collectedTuples.size(), 1);
     Assert.assertEquals("Must be one tuple in sink", deviation.collectedTuples.size(), 1);
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java b/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
index f9589db..90066a6 100644
--- a/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/statistics/WeightedMeanOperatorTest.java
@@ -45,7 +45,7 @@ public class WeightedMeanOperatorTest
     oper.data.process(2.0);
     oper.data.process(4.0);
     oper.endWindow();
-    
+
     Assert.assertEquals("Must be one tuple in sink", sink.collectedTuples.size(), 1);
     Assert.assertTrue("Expected mean value", sink.collectedTuples.get(0) == 3.0);
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java b/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
index 6266787..c027a21 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/DevNullCounterTest.java
@@ -25,7 +25,7 @@ import org.slf4j.LoggerFactory;
 import com.datatorrent.lib.testbench.EventGenerator;
 
 /**
- * 
+ *
  * Functional tests for {@link com.datatorrent.lib.testbench.DevNullCounter}.
  * <p>
  * <br>
@@ -35,7 +35,7 @@ import com.datatorrent.lib.testbench.EventGenerator;
  * Benchmarks:<br>
  * Object payload benchmarked at over 125 Million/sec <br>
  * DRC checks are validated<br>
- * 
+ *
  */
 public class DevNullCounterTest
 {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java b/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
index 3bd9c11..a9563ca 100644
--- a/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
+++ b/library/src/test/java/com/datatorrent/lib/stream/DevNullTest.java
@@ -25,7 +25,7 @@ import org.slf4j.LoggerFactory;
 import com.datatorrent.lib.testbench.EventGenerator;
 
 /**
- * Functional tests for {@link com.datatorrent.lib.testbench.DevNull}. 
+ * Functional tests for {@link com.datatorrent.lib.testbench.DevNull}.
  */
 public class DevNullTest
 {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java b/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
index 4f231cb..c569bf8 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/ActiveMQMessageGenerator.java
@@ -36,7 +36,7 @@ import org.apache.activemq.ActiveMQConnectionFactory;
  * This is the message generator outside of Malhar/Hadoop. This generates data
  * and send to ActiveMQ message bus so that Malhar input adapter for ActiveMQ
  * can receive it.
- * 
+ *
  */
 public class ActiveMQMessageGenerator
 {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java b/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
index 6876525..b9210f7 100644
--- a/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
+++ b/library/src/test/java/com/datatorrent/lib/testbench/RandomEventGeneratorTest.java
@@ -76,7 +76,7 @@ public class RandomEventGeneratorTest
     node.integer_data.setSink(integer_data);
     CollectorTestSink string_data = new CollectorTestSink();
     node.string_data.setSink(string_data);
-    
+
     node.setup(null);
     node.beginWindow(1);
     node.emitTuples();

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/com/datatorrent/lib/util/TestUtils.java b/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
index 673054b..403072d 100644
--- a/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
+++ b/library/src/test/java/com/datatorrent/lib/util/TestUtils.java
@@ -82,7 +82,7 @@ public class TestUtils
   {
     FileUtils.deleteQuietly(new File("target/" + description.getClassName()));
   }
-  
+
   @SuppressWarnings({"unchecked", "rawtypes"})
   public static <S extends Sink, T> S setSink(OutputPort<T> port, S sink)
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/dedup/DeduperPartitioningTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/dedup/DeduperPartitioningTest.java b/library/src/test/java/org/apache/apex/malhar/lib/dedup/DeduperPartitioningTest.java
index 479fbe8..fb03f2e 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/dedup/DeduperPartitioningTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/dedup/DeduperPartitioningTest.java
@@ -71,13 +71,13 @@ public class DeduperPartitioningTest
       dedup.setTimeExpression("eventTime.getTime()");
       dedup.setBucketSpan(60);
       dedup.setExpireBefore(600);
-      
+
       ConsoleOutputOperator console = dag.addOperator("Console", new ConsoleOutputOperator());
       dag.addStream("Generator to Dedup", gen.output, dedup.input);
       dag.addStream("Dedup to Console", dedup.unique, console.input);
       dag.setInputPortAttribute(dedup.input, Context.PortContext.TUPLE_CLASS, TestEvent.class);
       dag.setOutputPortAttribute(dedup.unique, Context.PortContext.TUPLE_CLASS, TestEvent.class);
-      dag.setAttribute(dedup, Context.OperatorContext.PARTITIONER, 
+      dag.setAttribute(dedup, Context.OperatorContext.PARTITIONER,
           new StatelessPartitioner<TimeBasedDedupOperator>(NUM_DEDUP_PARTITIONS));
     }
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/fs/FSRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/fs/FSRecordReaderTest.java b/library/src/test/java/org/apache/apex/malhar/lib/fs/FSRecordReaderTest.java
index ecaff70..8560228 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/fs/FSRecordReaderTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/fs/FSRecordReaderTest.java
@@ -90,10 +90,10 @@ public class FSRecordReaderTest
     LocalMode.Controller lc = lma.getController();
     lc.setHeartbeatMonitoringEnabled(true);
     lc.runAsync();
-    
+
     Set<String> expectedRecords = new HashSet<String>(Arrays.asList(FILE_1_DATA.split("\n")));
     expectedRecords.addAll(Arrays.asList(FILE_2_DATA.split("\n")));
-    
+
     while (DelimitedValidator.records.size() != expectedRecords.size()) {
       LOG.debug("Waiting for app to finish");
       Thread.sleep(1000);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/fs/GenericFileOutputOperatorTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/fs/GenericFileOutputOperatorTest.java b/library/src/test/java/org/apache/apex/malhar/lib/fs/GenericFileOutputOperatorTest.java
index 52d5c5a..6082f57 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/fs/GenericFileOutputOperatorTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/fs/GenericFileOutputOperatorTest.java
@@ -38,7 +38,7 @@ public class GenericFileOutputOperatorTest extends AbstractFileOutputOperatorTes
 
   /**
    * Test file rollover in case of idle windows
-   * 
+   *
    * @throws IOException
    */
   @Test
@@ -84,7 +84,7 @@ public class GenericFileOutputOperatorTest extends AbstractFileOutputOperatorTes
 
   /**
    * Test file rollover for tuple count
-   * 
+   *
    * @throws IOException
    */
   @Test
@@ -120,7 +120,7 @@ public class GenericFileOutputOperatorTest extends AbstractFileOutputOperatorTes
       checkOutput(i, testMeta.getDir() + "/output.txt_0", expected[i], true);
     }
   }
-  
+
   public static void checkOutput(int fileCount, String baseFilePath, String expectedOutput, boolean checkTmp)
   {
     if (fileCount >= 0) {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableByteArrayListMultimapImplTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableByteArrayListMultimapImplTest.java b/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableByteArrayListMultimapImplTest.java
index 2c9d7eb..22d317d 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableByteArrayListMultimapImplTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableByteArrayListMultimapImplTest.java
@@ -339,7 +339,7 @@ public class SpillableByteArrayListMultimapImplTest
     map.teardown();
     store.teardown();
   }
-  
+
   @Test
   public void testLoad()
   {
@@ -349,10 +349,10 @@ public class SpillableByteArrayListMultimapImplTest
     final int numOfEntry = 100000;
 
     SpillableStateStore store = testMeta.store;
-    
+
     SpillableByteArrayListMultimapImpl<String, String> multimap = new SpillableByteArrayListMultimapImpl<>(
         this.testMeta.store, ID1, 0L, new SerdeStringSlice(), new SerdeStringSlice());
-    
+
     Attribute.AttributeMap.DefaultAttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
     attributes.put(DAG.APPLICATION_PATH, testMeta.applicationPath);
     Context.OperatorContext context =

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableComplexComponentImplTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableComplexComponentImplTest.java b/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableComplexComponentImplTest.java
index 67db6ba..96855e0 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableComplexComponentImplTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableComplexComponentImplTest.java
@@ -42,7 +42,7 @@ public class SpillableComplexComponentImplTest
   {
     simpleIntegrationTestHelper(testMeta.store);
   }
-  
+
   public void simpleIntegrationTestHelper(SpillableStateStore store)
   {
     SpillableComplexComponentImpl sccImpl = new SpillableComplexComponentImpl(store);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableSetMultimapImplTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableSetMultimapImplTest.java b/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableSetMultimapImplTest.java
index e9903ec..6b188e4 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableSetMultimapImplTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/state/spillable/SpillableSetMultimapImplTest.java
@@ -266,7 +266,7 @@ public class SpillableSetMultimapImplTest
     map.teardown();
     store.teardown();
   }
-  
+
   @Test
   public void testLoad()
   {
@@ -276,10 +276,10 @@ public class SpillableSetMultimapImplTest
     final int numOfEntry = 100000;
 
     SpillableStateStore store = testMeta.store;
-    
+
     SpillableByteArrayListMultimapImpl<String, String> multimap = new SpillableByteArrayListMultimapImpl<>(
         this.testMeta.store, ID1, 0L, new SerdeStringSlice(), new SerdeStringSlice());
-    
+
     Attribute.AttributeMap.DefaultAttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
     attributes.put(DAG.APPLICATION_PATH, testMeta.applicationPath);
     Context.OperatorContext context =

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/wal/FSWindowDataManagerTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/wal/FSWindowDataManagerTest.java b/library/src/test/java/org/apache/apex/malhar/lib/wal/FSWindowDataManagerTest.java
index 9939bb9..bef1ae4 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/wal/FSWindowDataManagerTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/wal/FSWindowDataManagerTest.java
@@ -206,33 +206,33 @@ public class FSWindowDataManagerTest
     Assert.assertEquals("largest recovery window", 1, manager.getLargestCompletedWindow());
     manager.teardown();
   }
-  
+
   @Test
   public void testDelete() throws IOException
   {
     Pair<Context.OperatorContext, FSWindowDataManager> pair1 = createManagerAndContextFor(1);
     pair1.second.getWal().setMaxLength(2);
     pair1.second.setup(pair1.first);
-    
+
     Map<Integer, String> dataOf1 = Maps.newHashMap();
     dataOf1.put(1, "one");
     dataOf1.put(2, "two");
     dataOf1.put(3, "three");
-    
+
     for (int i = 1; i <= 9; ++i) {
       pair1.second.save(dataOf1, i);
     }
-    
+
     pair1.second.committed(3);
     pair1.second.teardown();
-    
+
     Pair<Context.OperatorContext, FSWindowDataManager> pair1AfterRecovery = createManagerAndContextFor(1);
     testMeta.attributes.put(Context.OperatorContext.ACTIVATION_WINDOW_ID, 1L);
     pair1AfterRecovery.second.setup(pair1AfterRecovery.first);
-    
+
     Assert.assertEquals("window 1 deleted", null, pair1AfterRecovery.second.retrieve(1));
     Assert.assertEquals("window 3 deleted", null, pair1AfterRecovery.second.retrieve(3));
-    
+
     Assert.assertEquals("window 4 exists", dataOf1, pair1AfterRecovery.second.retrieve(4));
     pair1.second.teardown();
   }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/AverageTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/AverageTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/AverageTest.java
index e5fd541..bfae223 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/AverageTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/AverageTest.java
@@ -32,7 +32,7 @@ public class AverageTest
   {
     Average ave = new Average();
     MutablePair<Double, Long> accu = ave.defaultAccumulatedValue();
-    
+
     for (int i = 1; i <= 10; i++) {
       accu = ave.accumulate(accu, (double)i);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/FoldFnTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/FoldFnTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/FoldFnTest.java
index cda6bf8..34a1812 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/FoldFnTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/FoldFnTest.java
@@ -34,16 +34,16 @@ public class FoldFnTest
   public static class NumGen extends BaseOperator implements InputOperator
   {
     public transient DefaultOutputPort<Integer> output = new DefaultOutputPort<>();
-  
+
     public static int count = 0;
     private int i = 0;
-  
+
     public NumGen()
     {
       count = 0;
       i = 0;
     }
-  
+
     @Override
     public void emitTuples()
     {
@@ -61,11 +61,11 @@ public class FoldFnTest
       i = -1;
     }
   }
-  
+
   public static class Collector extends BaseOperator
   {
     private static int result;
-    
+
     public transient DefaultInputPort<Tuple.WindowedTuple<Integer>> input = new DefaultInputPort<Tuple.WindowedTuple<Integer>>()
     {
       @Override
@@ -74,13 +74,13 @@ public class FoldFnTest
         result = tuple.getValue();
       }
     };
-    
+
     public int getResult()
     {
       return result;
     }
   }
-  
+
   public static class Plus extends FoldFn<Integer, Integer>
   {
     @Override
@@ -88,7 +88,7 @@ public class FoldFnTest
     {
       return fold(accumulatedValue1, accumulatedValue2);
     }
-    
+
     @Override
     public Integer fold(Integer input1, Integer input2)
     {
@@ -98,11 +98,11 @@ public class FoldFnTest
       return input1 + input2;
     }
   }
-  
+
   @Test
   public void FoldFnTest()
   {
-    
+
     FoldFn<String, String> concat = new FoldFn<String, String>()
     {
       @Override
@@ -110,17 +110,17 @@ public class FoldFnTest
       {
         return fold(accumulatedValue1, accumulatedValue2);
       }
-  
+
       @Override
       public String fold(String input1, String input2)
       {
         return input1 + ", " + input2;
       }
     };
-    
+
     String[] ss = new String[]{"b", "c", "d", "e"};
     String base = "a";
-    
+
     for (String s : ss) {
       base = concat.accumulate(base, s);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/GroupTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/GroupTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/GroupTest.java
index 891a824..d0455f8 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/GroupTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/GroupTest.java
@@ -31,7 +31,7 @@ public class GroupTest
   public void GroupTest()
   {
     Group<Integer> group = new Group<>();
-    
+
     List<Integer> accu = group.defaultAccumulatedValue();
     Assert.assertEquals(0, accu.size());
     Assert.assertEquals(1, group.accumulate(accu, 10).size());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MaxTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MaxTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MaxTest.java
index fe87d9e..5c5a761 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MaxTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MaxTest.java
@@ -31,11 +31,11 @@ public class MaxTest
   public void MaxTest()
   {
     Max<Integer> max = new Max<>();
-    
+
     Assert.assertEquals((Integer)5, max.accumulate(5, 3));
     Assert.assertEquals((Integer)6, max.accumulate(4, 6));
     Assert.assertEquals((Integer)5, max.merge(5, 2));
-  
+
     Comparator<Integer> com = new Comparator<Integer>()
     {
       @Override
@@ -44,7 +44,7 @@ public class MaxTest
         return -(o1.compareTo(o2));
       }
     };
-    
+
     max.setComparator(com);
     Assert.assertEquals((Integer)3, max.accumulate(5, 3));
     Assert.assertEquals((Integer)4, max.accumulate(4, 6));

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MinTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MinTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MinTest.java
index 3589735..7bf5c39 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MinTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/MinTest.java
@@ -31,11 +31,11 @@ public class MinTest
   public void MinTest()
   {
     Min<Integer> min = new Min<>();
-    
+
     Assert.assertEquals((Integer)3, min.accumulate(5, 3));
     Assert.assertEquals((Integer)4, min.accumulate(4, 6));
     Assert.assertEquals((Integer)2, min.merge(5, 2));
-    
+
     Comparator<Integer> com = new Comparator<Integer>()
     {
       @Override
@@ -44,7 +44,7 @@ public class MinTest
         return -(o1.compareTo(o2));
       }
     };
-    
+
     min.setComparator(com);
     Assert.assertEquals((Integer)5, min.accumulate(5, 3));
     Assert.assertEquals((Integer)6, min.accumulate(4, 6));

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/ReduceFnTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/ReduceFnTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/ReduceFnTest.java
index 26d73a7..798c16c 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/ReduceFnTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/ReduceFnTest.java
@@ -26,7 +26,7 @@ import org.junit.Test;
  */
 public class ReduceFnTest
 {
-  
+
   @Test
   public void ReduceFnTest()
   {
@@ -38,10 +38,10 @@ public class ReduceFnTest
         return input1 + ", " + input2;
       }
     };
-    
+
     String[] ss = new String[]{"b", "c", "d", "e"};
     String base = "a";
-    
+
     for (String s : ss) {
       base = concat.accumulate(base, s);
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicatesTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicatesTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicatesTest.java
index 674f871..37bfec8 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicatesTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/RemoveDuplicatesTest.java
@@ -31,7 +31,7 @@ public class RemoveDuplicatesTest
   public void RemoveDuplicatesTest()
   {
     RemoveDuplicates<Integer> rd = new RemoveDuplicates<>();
-    
+
     Set<Integer> accu = rd.defaultAccumulatedValue();
     Assert.assertEquals(0, accu.size());
     Assert.assertEquals(1, rd.accumulate(accu, 10).size());

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/SumTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/SumTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/SumTest.java
index 4c55612..4587a91 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/SumTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/SumTest.java
@@ -37,19 +37,19 @@ public class SumTest
     SumLong sl = new SumLong();
     SumFloat sf = new SumFloat();
     SumDouble sd = new SumDouble();
-    
+
     Assert.assertEquals(new MutableInt(10), si.accumulate(si.defaultAccumulatedValue(), 10));
     Assert.assertEquals(new MutableInt(11), si.accumulate(new MutableInt(1), 10));
     Assert.assertEquals(new MutableInt(22), si.merge(new MutableInt(1), new MutableInt(21)));
-    
+
     Assert.assertEquals(new MutableLong(10L), sl.accumulate(sl.defaultAccumulatedValue(), 10L));
     Assert.assertEquals(new MutableLong(22L), sl.accumulate(new MutableLong(2L), 20L));
     Assert.assertEquals(new MutableLong(41L), sl.merge(new MutableLong(32L), new MutableLong(9L)));
-    
+
     Assert.assertEquals(new MutableFloat(9.0F), sf.accumulate(sf.defaultAccumulatedValue(), 9.0F));
     Assert.assertEquals(new MutableFloat(22.5F), sf.accumulate(new MutableFloat(2.5F), 20F));
     Assert.assertEquals(new MutableFloat(41.0F), sf.merge(new MutableFloat(33.1F), new MutableFloat(7.9F)));
-    
+
     Assert.assertEquals(new MutableDouble(9.0), sd.accumulate(sd.defaultAccumulatedValue(), 9.0));
     Assert.assertEquals(new MutableDouble(22.5), sd.accumulate(new MutableDouble(2.5), 20.0));
     Assert.assertEquals(new MutableDouble(41.0), sd.merge(new MutableDouble(33.1), new MutableDouble(7.9)));

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/TopNByKeyTest.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/TopNByKeyTest.java b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/TopNByKeyTest.java
index 5bf2207..eec86e0 100644
--- a/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/TopNByKeyTest.java
+++ b/library/src/test/java/org/apache/apex/malhar/lib/window/accumulation/TopNByKeyTest.java
@@ -37,38 +37,38 @@ public class TopNByKeyTest
     TopNByKey<String, Integer> topNByKey = new TopNByKey<>();
     topNByKey.setN(3);
     Map<String, Integer> accu = topNByKey.defaultAccumulatedValue();
-  
+
     Assert.assertEquals(0, accu.size());
-    
+
     accu = topNByKey.accumulate(accu, new KeyValPair<String, Integer>("1", 1));
     accu = topNByKey.accumulate(accu, new KeyValPair<String, Integer>("3", 3));
-    
+
     List<KeyValPair<String, Integer>> result1 = new ArrayList<>();
-  
+
     result1.add(new KeyValPair<String, Integer>("3", 3));
     result1.add(new KeyValPair<String, Integer>("1", 1));
-    
+
     Assert.assertEquals(result1, topNByKey.getOutput(accu));
-    
+
     accu = topNByKey.accumulate(accu, new KeyValPair<String, Integer>("2", 2));
-  
+
     List<KeyValPair<String, Integer>> result2 = new ArrayList<>();
-  
+
     result2.add(new KeyValPair<String, Integer>("3", 3));
     result2.add(new KeyValPair<String, Integer>("2", 2));
     result2.add(new KeyValPair<String, Integer>("1", 1));
-    
+
     Assert.assertEquals(result2, topNByKey.getOutput(accu));
-    
+
     accu = topNByKey.accumulate(accu, new KeyValPair<String, Integer>("5", 5));
     accu = topNByKey.accumulate(accu, new KeyValPair<String, Integer>("4", 4));
-  
+
     List<KeyValPair<String, Integer>> result3 = new ArrayList<>();
-    
+
     result3.add(new KeyValPair<String, Integer>("5", 5));
     result3.add(new KeyValPair<String, Integer>("4", 4));
     result3.add(new KeyValPair<String, Integer>("3", 3));
-    
+
     Assert.assertEquals(result3, topNByKey.getOutput(accu));
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
index f92d9aa..005d510 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFile.java
@@ -36,7 +36,7 @@ import junit.framework.TestCase;
 
 /**
  * test tfile features.
- * 
+ *
  */
 public class TestDTFile extends TestCase
 {
@@ -323,7 +323,7 @@ public class TestDTFile extends TestCase
     scanner.seekTo(new byte[0]);
     byte[] val2 = readValue(scanner);
     assertTrue(Arrays.equals(val1, val2));
-    
+
     // check for lowerBound
     scanner.lowerBound(getSomeKey(50));
     assertTrue("locaton lookup failed", scanner.currentLocation

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
index f3479de..7e3bfc1 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestDTFileByteArrays.java
@@ -84,7 +84,7 @@ public class TestDTFileByteArrays
     this.records1stBlock = numRecords1stBlock;
     this.records2ndBlock = numRecords2ndBlock;
   }
-  
+
   public void init(String compression, String comparator)
   {
     this.compression = compression;
@@ -252,7 +252,7 @@ public class TestDTFileByteArrays
     }
     return scanner.endLocation;
   }
-  
+
   @Test
   public void testLocate() throws IOException
   {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java
index 5129d3c..23ddfbe 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java
@@ -46,11 +46,11 @@ public class TestTFileComparator2 {
   private static long cube(long n) {
     return n*n*n;
   }
-  
+
   private static String buildValue(long i) {
     return String.format("%s-%d", VALUE, i);
   }
-  
+
   @Test
   public void testSortedLongWritable() throws IOException {
     Configuration conf = new Configuration();
@@ -79,11 +79,11 @@ public class TestTFileComparator2 {
         }
       } finally {
         writer.close();
-      } 
+      }
     } finally {
       out.close();
     }
-    
+
     FSDataInputStream in = fs.open(path);
     try {
       DTFile.Reader reader = new DTFile.Reader(in, fs.getFileStatus(path)

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
index d588484..f5e3405 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java
@@ -30,10 +30,10 @@ import org.apache.hadoop.io.file.tfile.DTFile.Writer;
 import org.junit.Assert;
 
 /**
- * 
+ *
  * Byte arrays test case class using GZ compression codec, base class of none
  * and LZO compression classes.
- * 
+ *
  */
 public class TestTFileComparators extends TestCase {
   private static String ROOT =

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java
index 301cffc..a1d6960 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java
@@ -25,10 +25,10 @@ import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.WritableComparator;
 
 /**
- * 
+ *
  * Byte arrays test case class using GZ compression codec, base class of none
  * and LZO compression classes.
- * 
+ *
  */
 
 public class TestTFileJClassComparatorByteArrays extends TestDTFileByteArrays {
@@ -54,6 +54,6 @@ class MyComparator implements RawComparator<byte[]>, Serializable {
   public int compare(byte[] o1, byte[] o2) {
     return WritableComparator.compareBytes(o1, 0, o1.length, o2, 0, o2.length);
   }
-  
+
 }
 

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java
index 7c6581d..9fb0e0b 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java
@@ -33,7 +33,7 @@ public class TestTFileLzoCodecsStreams extends TestTFileStreams {
       System.out.println("Skipped");
     }
     init(Compression.Algorithm.LZO.getName(), "memcmp");
-    if (!skip) 
+    if (!skip)
       super.setUp();
   }
 }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java
index 31e3cad..00f08de 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java
@@ -21,10 +21,10 @@ package org.apache.hadoop.io.file.tfile;
 import java.io.IOException;
 
 /**
- * 
+ *
  * Byte arrays test case class using GZ compression codec, base class of none
  * and LZO compression classes.
- * 
+ *
  */
 
 public class TestTFileNoneCodecsJClassComparatorByteArrays extends TestDTFileByteArrays {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
index 6a3e086..5a9e706 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner;
  * test the performance for seek.
  *
  */
-public class TestTFileSeek extends TestCase { 
+public class TestTFileSeek extends TestCase {
   private MyOptions options;
   private Configuration conf;
   private Path path;
@@ -83,12 +83,12 @@ public class TestTFileSeek extends TestCase {
         new KVGenerator(rng, true, keyLenGen, valLenGen, wordLenGen,
             options.dictSize);
   }
-  
+
   @Override
   public void tearDown() throws IOException {
     fs.delete(path, true);
   }
-  
+
   private static FSDataOutputStream createFSOutput(Path name, FileSystem fs)
     throws IOException {
     if (fs.exists(name)) {
@@ -140,7 +140,7 @@ public class TestTFileSeek extends TestCase {
     System.out.printf("time: %s...file size: %.2fMB...disk thrpt: %.2fMB/s\n",
         timer.toString(), (double) fsize / 1024 / 1024, fsize / duration);
   }
-  
+
   public void seekTFile() throws IOException {
     int miss = 0;
     long totalBytes = 0;
@@ -176,7 +176,7 @@ public class TestTFileSeek extends TestCase {
         (double) totalBytes / 1024 / (options.seekCount - miss));
 
   }
-  
+
   public void testSeeks() throws IOException {
     String[] supported = TFile.getSupportedCompressionAlgorithms();
     boolean proceed = false;
@@ -200,7 +200,7 @@ public class TestTFileSeek extends TestCase {
       seekTFile();
     }
   }
-  
+
   private static class IntegerRange {
     private final int from, to;
 
@@ -241,7 +241,7 @@ public class TestTFileSeek extends TestCase {
     int fsOutputBufferSizeNone = 1;
     int fsOutputBufferSizeLzo = 1;
     int fsOutputBufferSizeGz = 1;
-   
+
     String rootDir =
         System.getProperty("test.build.data", "target/tfile-test");
     String file = "TestTFileSeek";
@@ -401,7 +401,7 @@ public class TestTFileSeek extends TestCase {
       if (line.hasOption('o')) {
         fsOutputBufferSize = Integer.parseInt(line.getOptionValue('o'));
       }
-      
+
       if (line.hasOption('n')) {
         seekCount = Integer.parseInt(line.getOptionValue('n'));
       }
@@ -425,7 +425,7 @@ public class TestTFileSeek extends TestCase {
       if (line.hasOption('r')) {
         rootDir = line.getOptionValue('r');
       }
-      
+
       if (line.hasOption('f')) {
         file = line.getOptionValue('f');
       }
@@ -488,11 +488,11 @@ public class TestTFileSeek extends TestCase {
       return (op & OP_READ) != 0;
     }
   }
-  
+
   public static void main(String[] argv) throws IOException {
     TestTFileSeek testCase = new TestTFileSeek();
     MyOptions options = new MyOptions(argv);
-    
+
     if (options.proceed == false) {
       return;
     }

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
index a68ae45..4a01b74 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
@@ -469,7 +469,7 @@ public class TestTFileSeqFileComparison extends TestCase {
       System.out.println("Skipped for " + compress);
       return;
     }
-    
+
     options.compress = compress;
     String parameters = parameters2String(options);
     createSeqFile(parameters, compress);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
index 2d28ae7..181be09 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java
@@ -95,14 +95,14 @@ public class TestTFileSplit extends TestCase {
     reader.close();
   }
 
-  /* Similar to readFile(), tests the scanner created 
+  /* Similar to readFile(), tests the scanner created
    * by record numbers rather than the offsets.
    */
   void readRowSplits(int numSplits) throws IOException {
 
     Reader reader =
       new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
-    
+
     long totalRecords = reader.getEntryCount();
     for (int i=0; i<numSplits; i++) {
       long startRec = i*totalRecords/numSplits;
@@ -130,11 +130,11 @@ public class TestTFileSplit extends TestCase {
     Scanner scanner = reader.createScannerByRecordNum(totalRecords, -1);
     Assert.assertTrue(scanner.atEnd());
   }
-  
+
   static String composeSortedKey(String prefix, int total, int value) {
     return String.format("%s%010d", prefix, value);
   }
-  
+
   void checkRecNums() throws IOException {
     long fileLen = fs.getFileStatus(path).getLen();
     Reader reader = new Reader(fs.open(path), fileLen, conf);
@@ -177,11 +177,11 @@ public class TestTFileSplit extends TestCase {
           .getRecordNumByLocation(reader.getLocationByRecordNum(x)));
     }
   }
-  
+
   public void testSplit() throws IOException {
     System.out.println("testSplit");
     createFile(100000, Compression.Algorithm.NONE.getName());
-    checkRecNums();   
+    checkRecNums();
     readFile();
     readRowSplits(10);
     fs.delete(path, true);

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
----------------------------------------------------------------------
diff --git a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
index 860a1de..a4a3a25 100644
--- a/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
+++ b/library/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java
@@ -36,10 +36,10 @@ import org.apache.hadoop.io.file.tfile.DTFile.Reader.Scanner;
 import org.junit.Assert;
 
 /**
- * 
+ *
  * Streaming interfaces test case class using GZ compression codec, base class
  * of none and LZO compression classes.
- * 
+ *
  */
 
 public class TestTFileStreams extends TestCase {
@@ -356,7 +356,7 @@ public class TestTFileStreams extends TestCase {
 
   /**
    * Verify that the compressed data size is less than raw data size.
-   * 
+   *
    * @throws IOException
    */
   public void testFailureCompressionNotWorking() throws IOException {

http://git-wip-us.apache.org/repos/asf/apex-malhar/blob/763d14fc/stream/src/test/java/org/apache/apex/malhar/stream/FunctionOperator/FunctionOperatorTest.java
----------------------------------------------------------------------
diff --git a/stream/src/test/java/org/apache/apex/malhar/stream/FunctionOperator/FunctionOperatorTest.java b/stream/src/test/java/org/apache/apex/malhar/stream/FunctionOperator/FunctionOperatorTest.java
index 9d03f2a..a5da669 100644
--- a/stream/src/test/java/org/apache/apex/malhar/stream/FunctionOperator/FunctionOperatorTest.java
+++ b/stream/src/test/java/org/apache/apex/malhar/stream/FunctionOperator/FunctionOperatorTest.java
@@ -169,7 +169,7 @@ public class FunctionOperatorTest
     // Create local cluster
     LocalMode.Controller lc = lma.getController();
     lc.setHeartbeatMonitoringEnabled(false);
-  
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override
@@ -178,9 +178,9 @@ public class FunctionOperatorTest
         return TupleCount == NumTuples;
       }
     });
-    
+
     lc.run(5000);
-    
+
     Assert.assertEquals(sum, 285);
   }
 
@@ -224,7 +224,7 @@ public class FunctionOperatorTest
     // Create local cluster
     LocalMode.Controller lc = lma.getController();
     lc.setHeartbeatMonitoringEnabled(false);
-  
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override
@@ -235,7 +235,7 @@ public class FunctionOperatorTest
     });
 
     lc.run(5000);
-    
+
     Assert.assertEquals(sum, 39555);
   }
 
@@ -285,7 +285,7 @@ public class FunctionOperatorTest
     // Create local cluster
     LocalMode.Controller lc = lma.getController();
     lc.setHeartbeatMonitoringEnabled(false);
-  
+
     ((StramLocalCluster)lc).setExitCondition(new Callable<Boolean>()
     {
       @Override